Jan 26 10:43:03 crc systemd[1]: Starting Kubernetes Kubelet...
Jan 26 10:43:03 crc restorecon[4697]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by
admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c377,c642 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 
10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 10:43:03 crc 
restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 10:43:03 crc restorecon[4697]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 26 10:43:03 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 
10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 
10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc 
restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 10:43:04 crc restorecon[4697]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 26 10:43:04 crc restorecon[4697]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 26 10:43:04 crc restorecon[4697]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Jan 26 10:43:04 crc kubenswrapper[5003]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 26 10:43:04 crc kubenswrapper[5003]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Jan 26 10:43:04 crc kubenswrapper[5003]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 26 10:43:04 crc kubenswrapper[5003]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Jan 26 10:43:04 crc kubenswrapper[5003]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Jan 26 10:43:04 crc kubenswrapper[5003]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.824608 5003 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829168 5003 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829199 5003 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829206 5003 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829216 5003 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829225 5003 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829233 5003 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829240 5003 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829247 5003 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829254 5003 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829260 5003 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829266 5003 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829273 5003 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829303 5003 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829310 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829316 5003 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829323 5003 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829329 5003 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829338 5003 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829347 5003 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829354 5003 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829362 5003 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829370 5003 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829377 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829384 5003 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829391 5003 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829399 5003 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829405 5003 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829412 5003 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829419 5003 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829426 5003 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829432 5003 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829451 5003 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829458 5003 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829467 5003 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829475 5003 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829482 5003 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829491 5003 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829499 5003 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829507 5003 feature_gate.go:330] unrecognized feature gate: Example Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829514 5003 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829521 5003 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829527 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829534 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 
10:43:04.829542 5003 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829550 5003 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829558 5003 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829567 5003 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829574 5003 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829581 5003 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829588 5003 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829595 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829601 5003 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829608 5003 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829615 5003 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829621 5003 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829631 5003 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829640 5003 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829646 5003 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829651 5003 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829657 5003 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829663 5003 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829669 5003 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829676 5003 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829683 5003 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829689 5003 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829696 5003 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829706 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829712 5003 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829719 5003 feature_gate.go:330] 
unrecognized feature gate: InsightsConfig Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829725 5003 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.829731 5003 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.829841 5003 flags.go:64] FLAG: --address="0.0.0.0" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.829852 5003 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.829862 5003 flags.go:64] FLAG: --anonymous-auth="true" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.829869 5003 flags.go:64] FLAG: --application-metrics-count-limit="100" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.829877 5003 flags.go:64] FLAG: --authentication-token-webhook="false" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.829884 5003 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.829892 5003 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.829907 5003 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.829916 5003 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.829923 5003 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.829932 5003 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.829940 5003 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.829949 5003 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.829956 5003 flags.go:64] FLAG: --cgroup-root="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.829964 5003 flags.go:64] FLAG: --cgroups-per-qos="true" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.829971 5003 flags.go:64] FLAG: --client-ca-file="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.829977 5003 flags.go:64] FLAG: --cloud-config="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.829986 5003 flags.go:64] FLAG: --cloud-provider="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.829992 5003 flags.go:64] FLAG: --cluster-dns="[]" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830001 5003 flags.go:64] FLAG: --cluster-domain="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830007 5003 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830013 5003 flags.go:64] FLAG: --config-dir="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830021 5003 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830028 5003 flags.go:64] FLAG: --container-log-max-files="5" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830042 5003 flags.go:64] FLAG: --container-log-max-size="10Mi" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830049 5003 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830055 5003 flags.go:64] FLAG: 
--containerd="/run/containerd/containerd.sock" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830061 5003 flags.go:64] FLAG: --containerd-namespace="k8s.io" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830067 5003 flags.go:64] FLAG: --contention-profiling="false" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830074 5003 flags.go:64] FLAG: --cpu-cfs-quota="true" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830079 5003 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830086 5003 flags.go:64] FLAG: --cpu-manager-policy="none" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830093 5003 flags.go:64] FLAG: --cpu-manager-policy-options="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830101 5003 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830107 5003 flags.go:64] FLAG: --enable-controller-attach-detach="true" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830113 5003 flags.go:64] FLAG: --enable-debugging-handlers="true" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830119 5003 flags.go:64] FLAG: --enable-load-reader="false" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830125 5003 flags.go:64] FLAG: --enable-server="true" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830131 5003 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830139 5003 flags.go:64] FLAG: --event-burst="100" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830145 5003 flags.go:64] FLAG: --event-qps="50" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830151 5003 flags.go:64] FLAG: --event-storage-age-limit="default=0" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830158 5003 flags.go:64] FLAG: --event-storage-event-limit="default=0" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830165 5003 flags.go:64] FLAG: --eviction-hard="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830172 5003 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830178 5003 flags.go:64] FLAG: --eviction-minimum-reclaim="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830184 5003 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830190 5003 flags.go:64] FLAG: --eviction-soft="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830196 5003 flags.go:64] FLAG: --eviction-soft-grace-period="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830203 5003 flags.go:64] FLAG: --exit-on-lock-contention="false" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830209 5003 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830216 5003 flags.go:64] FLAG: --experimental-mounter-path="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830223 5003 flags.go:64] FLAG: --fail-cgroupv1="false" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830229 5003 flags.go:64] FLAG: --fail-swap-on="true" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830234 5003 flags.go:64] FLAG: --feature-gates="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830242 5003 flags.go:64] FLAG: --file-check-frequency="20s" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830248 5003 flags.go:64] FLAG: 
--global-housekeeping-interval="1m0s" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830254 5003 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830260 5003 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830267 5003 flags.go:64] FLAG: --healthz-port="10248" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830272 5003 flags.go:64] FLAG: --help="false" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830299 5003 flags.go:64] FLAG: --hostname-override="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830306 5003 flags.go:64] FLAG: --housekeeping-interval="10s" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830311 5003 flags.go:64] FLAG: --http-check-frequency="20s" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830317 5003 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830323 5003 flags.go:64] FLAG: --image-credential-provider-config="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830329 5003 flags.go:64] FLAG: --image-gc-high-threshold="85" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830335 5003 flags.go:64] FLAG: --image-gc-low-threshold="80" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830341 5003 flags.go:64] FLAG: --image-service-endpoint="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830348 5003 flags.go:64] FLAG: --kernel-memcg-notification="false" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830354 5003 flags.go:64] FLAG: --kube-api-burst="100" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830359 5003 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830366 5003 flags.go:64] FLAG: --kube-api-qps="50" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830372 5003 flags.go:64] FLAG: --kube-reserved="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830378 5003 flags.go:64] FLAG: --kube-reserved-cgroup="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830384 5003 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830390 5003 flags.go:64] FLAG: --kubelet-cgroups="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830396 5003 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830402 5003 flags.go:64] FLAG: --lock-file="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830409 5003 flags.go:64] FLAG: --log-cadvisor-usage="false" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830416 5003 flags.go:64] FLAG: --log-flush-frequency="5s" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830422 5003 flags.go:64] FLAG: --log-json-info-buffer-size="0" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830431 5003 flags.go:64] FLAG: --log-json-split-stream="false" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830438 5003 flags.go:64] FLAG: --log-text-info-buffer-size="0" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830444 5003 flags.go:64] FLAG: --log-text-split-stream="false" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830449 5003 flags.go:64] FLAG: --logging-format="text" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830455 5003 flags.go:64] FLAG: 
--machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830462 5003 flags.go:64] FLAG: --make-iptables-util-chains="true" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830468 5003 flags.go:64] FLAG: --manifest-url="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830474 5003 flags.go:64] FLAG: --manifest-url-header="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830481 5003 flags.go:64] FLAG: --max-housekeeping-interval="15s" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830487 5003 flags.go:64] FLAG: --max-open-files="1000000" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830495 5003 flags.go:64] FLAG: --max-pods="110" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830501 5003 flags.go:64] FLAG: --maximum-dead-containers="-1" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830508 5003 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830514 5003 flags.go:64] FLAG: --memory-manager-policy="None" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830521 5003 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830527 5003 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830533 5003 flags.go:64] FLAG: --node-ip="192.168.126.11" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830540 5003 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830553 5003 flags.go:64] FLAG: --node-status-max-images="50" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830559 5003 flags.go:64] FLAG: --node-status-update-frequency="10s" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830565 5003 flags.go:64] FLAG: --oom-score-adj="-999" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830571 5003 flags.go:64] FLAG: --pod-cidr="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830579 5003 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830593 5003 flags.go:64] FLAG: --pod-manifest-path="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830601 5003 flags.go:64] FLAG: --pod-max-pids="-1" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830608 5003 flags.go:64] FLAG: --pods-per-core="0" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830615 5003 flags.go:64] FLAG: --port="10250" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830623 5003 flags.go:64] FLAG: --protect-kernel-defaults="false" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830631 5003 flags.go:64] FLAG: --provider-id="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830638 5003 flags.go:64] FLAG: --qos-reserved="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830645 5003 flags.go:64] FLAG: --read-only-port="10255" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830652 5003 flags.go:64] FLAG: --register-node="true" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830660 5003 flags.go:64] FLAG: --register-schedulable="true" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830669 5003 flags.go:64] FLAG: 
--register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830682 5003 flags.go:64] FLAG: --registry-burst="10" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830690 5003 flags.go:64] FLAG: --registry-qps="5" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830697 5003 flags.go:64] FLAG: --reserved-cpus="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830705 5003 flags.go:64] FLAG: --reserved-memory="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830714 5003 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830720 5003 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830726 5003 flags.go:64] FLAG: --rotate-certificates="false" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830732 5003 flags.go:64] FLAG: --rotate-server-certificates="false" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830739 5003 flags.go:64] FLAG: --runonce="false" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830745 5003 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830751 5003 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830758 5003 flags.go:64] FLAG: --seccomp-default="false" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830764 5003 flags.go:64] FLAG: --serialize-image-pulls="true" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830770 5003 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830777 5003 flags.go:64] FLAG: --storage-driver-db="cadvisor" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830784 5003 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830790 5003 flags.go:64] FLAG: --storage-driver-password="root" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830795 5003 flags.go:64] FLAG: --storage-driver-secure="false" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830801 5003 flags.go:64] FLAG: --storage-driver-table="stats" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830808 5003 flags.go:64] FLAG: --storage-driver-user="root" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830813 5003 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830820 5003 flags.go:64] FLAG: --sync-frequency="1m0s" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830826 5003 flags.go:64] FLAG: --system-cgroups="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830832 5003 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830842 5003 flags.go:64] FLAG: --system-reserved-cgroup="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830848 5003 flags.go:64] FLAG: --tls-cert-file="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830854 5003 flags.go:64] FLAG: --tls-cipher-suites="[]" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830862 5003 flags.go:64] FLAG: --tls-min-version="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830868 5003 flags.go:64] FLAG: --tls-private-key-file="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830874 5003 flags.go:64] FLAG: 
--topology-manager-policy="none" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830881 5003 flags.go:64] FLAG: --topology-manager-policy-options="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830887 5003 flags.go:64] FLAG: --topology-manager-scope="container" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830894 5003 flags.go:64] FLAG: --v="2" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830903 5003 flags.go:64] FLAG: --version="false" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830913 5003 flags.go:64] FLAG: --vmodule="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830924 5003 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.830933 5003 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831083 5003 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831090 5003 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831096 5003 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831101 5003 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831107 5003 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831112 5003 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831117 5003 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831122 5003 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831128 5003 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831133 5003 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831138 5003 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831144 5003 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831149 5003 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831154 5003 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831159 5003 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831165 5003 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831172 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831179 5003 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831186 5003 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831192 5003 feature_gate.go:330] 
unrecognized feature gate: GCPLabelsTags Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831198 5003 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831205 5003 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831211 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831217 5003 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831224 5003 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831238 5003 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831244 5003 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831250 5003 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831256 5003 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831262 5003 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831269 5003 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831581 5003 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831596 5003 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831604 5003 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831614 5003 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831622 5003 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831629 5003 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831635 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831642 5003 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831649 5003 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831656 5003 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831664 5003 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831672 5003 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831681 5003 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831690 5003 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831697 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831704 5003 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831712 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831720 5003 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831727 5003 feature_gate.go:330] unrecognized feature gate: Example Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831734 5003 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831740 5003 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831747 5003 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831754 5003 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831760 5003 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831767 5003 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831774 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831782 5003 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831788 5003 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831795 5003 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831800 5003 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831806 5003 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831811 5003 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831816 5003 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831821 5003 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831827 5003 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831832 5003 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831837 5003 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831842 5003 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 
10:43:04.831847 5003 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.831853 5003 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.831871 5003 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.841728 5003 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.841779 5003 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.841901 5003 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.841910 5003 feature_gate.go:330] unrecognized feature gate: Example Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.841914 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.841919 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.841922 5003 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.841926 5003 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.841929 5003 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.841933 5003 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.841936 5003 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.841941 5003 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.841944 5003 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.841949 5003 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.841957 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.841961 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.841965 5003 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.841969 5003 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.841973 5003 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.841977 5003 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.841981 5003 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.841985 5003 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.841989 5003 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.841994 5003 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.841998 5003 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842001 5003 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842005 5003 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842009 5003 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842012 5003 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842016 5003 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842020 5003 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842024 5003 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842029 5003 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842036 5003 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842040 5003 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842047 5003 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842062 5003 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842067 5003 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842072 5003 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842076 5003 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842082 5003 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842086 5003 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842091 5003 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842096 5003 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842100 5003 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842105 5003 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842110 5003 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842114 5003 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842119 5003 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842123 5003 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842129 5003 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842135 5003 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842140 5003 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842145 5003 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842149 5003 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842153 5003 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842158 5003 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842162 5003 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842167 5003 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842171 5003 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842176 5003 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842180 5003 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842185 5003 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842189 5003 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842195 5003 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842202 5003 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842207 5003 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842211 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842216 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842221 5003 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842225 5003 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842229 5003 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842262 5003 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.842273 5003 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false 
ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842455 5003 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842468 5003 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842473 5003 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842478 5003 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842484 5003 feature_gate.go:330] unrecognized feature gate: Example Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842489 5003 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842493 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842497 5003 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842502 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842514 5003 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842519 5003 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842523 5003 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842528 5003 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842532 5003 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842537 5003 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842542 5003 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842546 5003 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842552 5003 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842559 5003 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842563 5003 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842568 5003 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842573 5003 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842577 5003 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842584 5003 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842589 5003 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842594 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842599 5003 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842604 5003 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842608 5003 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842613 5003 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842617 5003 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842622 5003 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842626 5003 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842631 5003 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842643 5003 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842647 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842653 5003 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842658 5003 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842663 5003 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842668 5003 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842674 5003 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842680 5003 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842686 5003 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842690 5003 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842695 5003 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842698 5003 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842702 5003 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842706 5003 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842710 5003 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842714 5003 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842717 5003 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842721 5003 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842724 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842728 5003 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842731 5003 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842737 5003 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842742 5003 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842746 5003 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842750 5003 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842754 5003 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842758 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842762 5003 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842766 5003 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842770 5003 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842774 5003 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842780 5003 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842785 5003 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842790 5003 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842795 5003 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842800 5003 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.842815 5003 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.842824 5003 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.843141 5003 server.go:940] "Client rotation is on, will bootstrap in background"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.847728 5003 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.847862 5003 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.848592 5003 server.go:997] "Starting client certificate rotation"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.848623 5003 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.849011 5003 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-10 23:11:01.823979175 +0000 UTC
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.849138 5003 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.856807 5003 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Jan 26 10:43:04 crc kubenswrapper[5003]: E0126 10:43:04.856840 5003 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.192:6443: connect: connection refused" logger="UnhandledError"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.858431 5003 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.875311 5003 log.go:25] "Validated CRI v1 runtime API"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.908377 5003 log.go:25] "Validated CRI v1 image API"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.910015 5003 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.912873 5003 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2026-01-26-10-39-25-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.912912 5003 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.926802 5003 manager.go:217] Machine: {Timestamp:2026-01-26 10:43:04.924875176 +0000 UTC m=+0.466100757 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:587875d7-ac1e-443a-baca-4a26e90f0b87 BootID:6e521106-5ceb-4879-a461-45cda76aa109 Filesystems:[{Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:fe:58:a8 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:fe:58:a8 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:5e:c4:21 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:f5:29:19 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:fc:89:5e Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:cc:52:0a Speed:-1 Mtu:1496} {Name:eth10 MacAddress:d6:a1:76:d1:dc:02 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:32:7d:f7:5b:e5:4d Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.927021 5003 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.927149 5003 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.927776 5003 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.927993 5003 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.928037 5003 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.928295 5003 topology_manager.go:138] "Creating topology manager with none policy"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.928333 5003 container_manager_linux.go:303] "Creating device plugin manager"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.928563 5003 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.928593 5003 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.928840 5003 state_mem.go:36] "Initialized new in-memory state store"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.928941 5003 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.929652 5003 kubelet.go:418] "Attempting to sync node with API server"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.929680 5003 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.929708 5003 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.929721 5003 kubelet.go:324] "Adding apiserver pod source"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.929732 5003 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.931773 5003 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.932263 5003 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.933570 5003 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.933811 5003 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.192:6443: connect: connection refused
Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.933862 5003 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.192:6443: connect: connection refused
Jan 26 10:43:04 crc kubenswrapper[5003]: E0126 10:43:04.934031 5003 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.192:6443: connect: connection refused" logger="UnhandledError"
Jan 26 10:43:04 crc kubenswrapper[5003]: E0126 10:43:04.933942 5003 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.192:6443: connect: connection refused" logger="UnhandledError"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.934730 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.934761 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.934772 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.934783 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.934799 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.934815 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.934825 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.934841 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.934853 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.934863 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.934880 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.934895 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.935449 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.936222 5003 server.go:1280] "Started kubelet"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.936458 5003 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.192:6443: connect: connection refused
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.938872 5003 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.939910 5003 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Jan 26 10:43:04 crc systemd[1]: Started Kubernetes Kubelet.
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.941431 5003 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.945549 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.945593 5003 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.945896 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 07:05:29.172468292 +0000 UTC
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.946012 5003 server.go:460] "Adding debug handlers to kubelet server"
Jan 26 10:43:04 crc kubenswrapper[5003]: E0126 10:43:04.946350 5003 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.946384 5003 volume_manager.go:287] "The desired_state_of_world populator starts"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.946414 5003 volume_manager.go:289] "Starting Kubelet Volume Manager"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.946419 5003 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Jan 26 10:43:04 crc kubenswrapper[5003]: W0126 10:43:04.947585 5003 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.192:6443: connect: connection refused
Jan 26 10:43:04 crc kubenswrapper[5003]: E0126 10:43:04.947696 5003 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.192:6443: connect: connection refused" logger="UnhandledError"
Jan 26 10:43:04 crc kubenswrapper[5003]: E0126 10:43:04.947799 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.192:6443: connect: connection refused" interval="200ms"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.948834 5003 factory.go:55] Registering systemd factory
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.948865 5003 factory.go:221] Registration of the systemd container factory successfully
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.949409 5003 factory.go:153] Registering CRI-O factory
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.949433 5003 factory.go:221] Registration of the crio container factory successfully
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.951685 5003 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.951736 5003 factory.go:103] Registering Raw factory
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.951757 5003 manager.go:1196] Started watching for new ooms in manager
Jan 26 10:43:04 crc kubenswrapper[5003]: E0126 10:43:04.948067 5003 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.192:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.188e41ebe01b5baf default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-26 10:43:04.936192943 +0000 UTC m=+0.477418524,LastTimestamp:2026-01-26 10:43:04.936192943 +0000 UTC m=+0.477418524,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.953121 5003 manager.go:319] Starting recovery of all containers
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.957795 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.957834 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.957848 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.957858 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.957868 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.957877 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.957886 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.957896 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.957911 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.957924 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.957936 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.957954 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.957967 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.957980 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.957992 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958053 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958071 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958080 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958090 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958098 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958107 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958117 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958129 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958139 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958151 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958162 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958176 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958196 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958207 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958225 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958236 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958253 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958267 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958342 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958360 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958375 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958389 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958406 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958422 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958436 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958451 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958466 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958482 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958498 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958511 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958542 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958555 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958567 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958604 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958618 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958630 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958642 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958659 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958671 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.958684 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.959484 5003 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.959527 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.959542 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.959555 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.959568 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.959593 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.959606 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.959616 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.959627 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.959641 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.959653 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.959665 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.959677 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.959691 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.959703 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.959725 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.959741 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.959770 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.959786 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.959810 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.959825 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.959848 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.959865 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.959882 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.959898 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.959914 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.959960 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.959980 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.959996 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960012 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960026 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960063 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960080 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960097 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960112 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960128 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960143 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960167 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960182 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960197 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960213 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960229 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960246 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960264 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960297 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960338 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960363 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960382 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960409 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960429 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960458 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960478 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960494 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960510 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960528 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960545 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960561 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960576 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960598 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960614 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960653 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960669 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960684 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.960709 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961252 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961299 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961317 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961342 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961358 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961375 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961394 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961410 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961425 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961443 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961458 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961476 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961493 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961507 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961525 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961544 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961561 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961586 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961603 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961620 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961637 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961652 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961669 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961684 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961698 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961712 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961728 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961743 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod=""
podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961761 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961776 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961793 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961811 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961825 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961840 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961855 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961872 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961893 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961909 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961923 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" 
volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961939 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961953 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961968 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961983 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.961997 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962014 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962030 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962046 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962066 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962084 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962099 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" 
volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962117 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962129 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962145 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962160 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962175 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962191 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962207 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962232 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962249 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962265 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962315 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962333 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962354 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962374 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962389 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962406 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962422 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962436 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962459 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962480 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962497 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962510 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" 
volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962530 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962581 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962602 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962622 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962640 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962664 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962684 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962700 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962719 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962739 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962755 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" 
volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962771 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962786 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962801 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962820 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962833 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962851 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962865 5003 reconstruct.go:97] "Volume reconstruction finished" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.962878 5003 reconciler.go:26] "Reconciler: start to sync state" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.982631 5003 manager.go:324] Recovery completed Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.995522 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.998094 5003 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv4" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.998306 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.998377 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.998393 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.999705 5003 cpu_manager.go:225] "Starting CPU manager" policy="none" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.999728 5003 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Jan 26 10:43:04 crc kubenswrapper[5003]: I0126 10:43:04.999747 5003 state_mem.go:36] "Initialized new in-memory state store" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.000072 5003 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.000409 5003 status_manager.go:217] "Starting to sync pod status with apiserver" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.000462 5003 kubelet.go:2335] "Starting kubelet main sync loop" Jan 26 10:43:05 crc kubenswrapper[5003]: E0126 10:43:05.000516 5003 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Jan 26 10:43:05 crc kubenswrapper[5003]: W0126 10:43:05.002435 5003 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.192:6443: connect: connection refused Jan 26 10:43:05 crc kubenswrapper[5003]: E0126 10:43:05.003421 5003 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.192:6443: connect: connection refused" logger="UnhandledError" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.007395 5003 policy_none.go:49] "None policy: Start" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.008086 5003 memory_manager.go:170] "Starting memorymanager" policy="None" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.008109 5003 state_mem.go:35] "Initializing new in-memory state store" Jan 26 10:43:05 crc kubenswrapper[5003]: E0126 10:43:05.047191 5003 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.059553 5003 manager.go:334] "Starting Device Plugin manager" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.059606 5003 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.059620 5003 server.go:79] "Starting device plugin registration server" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.060067 5003 eviction_manager.go:189] "Eviction manager: starting control loop" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.060087 5003 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Jan 26 
10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.060480 5003 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.060608 5003 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.060657 5003 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Jan 26 10:43:05 crc kubenswrapper[5003]: E0126 10:43:05.066159 5003 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.101365 5003 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.101485 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.102768 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.102823 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.102835 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.103019 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.103688 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.103781 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.104088 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.104140 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.104148 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.104363 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.104688 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.104743 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.105103 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.105141 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.105159 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.105850 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.105880 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.105892 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.106065 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.106210 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.106251 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.106266 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.106394 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.106469 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.106884 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.106906 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.106916 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.107015 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.107145 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.107170 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.107330 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.107347 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.107356 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.107782 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.107807 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.107817 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.107969 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.108002 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.108492 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.108528 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.108542 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.108643 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.108662 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.108672 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:05 crc kubenswrapper[5003]: E0126 10:43:05.149367 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.192:6443: connect: connection refused" interval="400ms" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.160488 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.163153 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.163314 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:05 crc 
kubenswrapper[5003]: I0126 10:43:05.163401 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.163506 5003 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 26 10:43:05 crc kubenswrapper[5003]: E0126 10:43:05.164151 5003 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.192:6443: connect: connection refused" node="crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.165485 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.165520 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.165539 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.165555 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.165577 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.165691 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.165714 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.165734 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.165755 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.165772 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.165791 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.165810 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.165856 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.165903 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.165938 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.266530 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.266590 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.266614 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.266644 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.266664 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.266681 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.266701 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.266727 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.266751 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.266762 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.266793 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.266803 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: 
\"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.266821 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.266843 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.266887 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.266895 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.266898 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.266898 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.266928 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.266970 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.266977 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.266942 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod 
\"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.266887 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.267034 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.266949 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.267038 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.266979 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.267090 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.267102 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.267067 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 26 10:43:05 crc kubenswrapper[5003]: E0126 10:43:05.295379 5003 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.192:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.188e41ebe01b5baf default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
Jan 26 10:43:05 crc kubenswrapper[5003]: E0126 10:43:05.295379 5003 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.192:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.188e41ebe01b5baf default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-26 10:43:04.936192943 +0000 UTC m=+0.477418524,LastTimestamp:2026-01-26 10:43:04.936192943 +0000 UTC m=+0.477418524,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.364460 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.365964 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.365999 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.366010 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.366042 5003 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Jan 26 10:43:05 crc kubenswrapper[5003]: E0126 10:43:05.366561 5003 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.192:6443: connect: connection refused" node="crc"
Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.428571 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.442746 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.468621 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 26 10:43:05 crc kubenswrapper[5003]: W0126 10:43:05.474978 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-23785bb42ce7f8bcef0085e9b62846eba6c00fd53af0e1cf497c8baf4c719e8a WatchSource:0}: Error finding container 23785bb42ce7f8bcef0085e9b62846eba6c00fd53af0e1cf497c8baf4c719e8a: Status 404 returned error can't find the container with id 23785bb42ce7f8bcef0085e9b62846eba6c00fd53af0e1cf497c8baf4c719e8a
Jan 26 10:43:05 crc kubenswrapper[5003]: W0126 10:43:05.479671 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-8c90f9ea4da656fc643ebfa3e37c0bd1bf35cdc454ebfefc4054ed2f7dbb0829 WatchSource:0}: Error finding container 8c90f9ea4da656fc643ebfa3e37c0bd1bf35cdc454ebfefc4054ed2f7dbb0829: Status 404 returned error can't find the container with id 8c90f9ea4da656fc643ebfa3e37c0bd1bf35cdc454ebfefc4054ed2f7dbb0829
Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.484872 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.494144 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Jan 26 10:43:05 crc kubenswrapper[5003]: W0126 10:43:05.500220 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-58311e47f3315284ce9ef4c691de3ad705c3cea067a750864bd808ffe3eba083 WatchSource:0}: Error finding container 58311e47f3315284ce9ef4c691de3ad705c3cea067a750864bd808ffe3eba083: Status 404 returned error can't find the container with id 58311e47f3315284ce9ef4c691de3ad705c3cea067a750864bd808ffe3eba083
Jan 26 10:43:05 crc kubenswrapper[5003]: W0126 10:43:05.506554 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-c6197b889685b111088722f55c1e9ed10ea409c2feec7f3f5a10da5e6ed09d7f WatchSource:0}: Error finding container c6197b889685b111088722f55c1e9ed10ea409c2feec7f3f5a10da5e6ed09d7f: Status 404 returned error can't find the container with id c6197b889685b111088722f55c1e9ed10ea409c2feec7f3f5a10da5e6ed09d7f
Jan 26 10:43:05 crc kubenswrapper[5003]: W0126 10:43:05.513667 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-c093cae9faf7ca218832fc5673ba85946777f72fce3ed174047e14e7e01f37c9 WatchSource:0}: Error finding container c093cae9faf7ca218832fc5673ba85946777f72fce3ed174047e14e7e01f37c9: Status 404 returned error can't find the container with id c093cae9faf7ca218832fc5673ba85946777f72fce3ed174047e14e7e01f37c9
Jan 26 10:43:05 crc kubenswrapper[5003]: E0126 10:43:05.550350 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.192:6443: connect: connection refused" interval="800ms"
Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.767216 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.769002 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.769046 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.769058 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.769090 5003 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Jan 26 10:43:05 crc kubenswrapper[5003]: E0126 10:43:05.769657 5003 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.192:6443: connect: connection refused" node="crc"
Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.938007 5003 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.192:6443: connect: connection refused
Jan 26 10:43:05 crc kubenswrapper[5003]: I0126 10:43:05.946546 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 12:47:50.840092531 +0000 UTC
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.006896 5003 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="9c0f56e5d83ce7fcbde9c0503292ed1f242b2c1f535b471974ca3662ba6933e3" exitCode=0
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.006982 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"9c0f56e5d83ce7fcbde9c0503292ed1f242b2c1f535b471974ca3662ba6933e3"}
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.007071 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"c093cae9faf7ca218832fc5673ba85946777f72fce3ed174047e14e7e01f37c9"}
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.007211 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.008809 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.008844 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.008862 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.010570 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3"}
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.010620 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c6197b889685b111088722f55c1e9ed10ea409c2feec7f3f5a10da5e6ed09d7f"}
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.012432 5003 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a" exitCode=0
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.012514 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a"}
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.012557 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"58311e47f3315284ce9ef4c691de3ad705c3cea067a750864bd808ffe3eba083"}
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.012657 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.013400 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.013428 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.013441 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.014204 5003 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5" exitCode=0
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.014261 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5"}
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.014295 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"23785bb42ce7f8bcef0085e9b62846eba6c00fd53af0e1cf497c8baf4c719e8a"}
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.014366 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.014398 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.015062 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.015082 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.015093 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.015099 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.015105 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.015110 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.016096 5003 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="f7d7272571d0c28d942b9bd1fad1759a627cc5e39e2b7ee804863f097855e697" exitCode=0
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.016134 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"f7d7272571d0c28d942b9bd1fad1759a627cc5e39e2b7ee804863f097855e697"}
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.016198 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"8c90f9ea4da656fc643ebfa3e37c0bd1bf35cdc454ebfefc4054ed2f7dbb0829"}
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.016338 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.017150 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.017184 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.017198 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:43:06 crc kubenswrapper[5003]: W0126 10:43:06.207947 5003 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.192:6443: connect: connection refused
Jan 26 10:43:06 crc kubenswrapper[5003]: E0126 10:43:06.208049 5003 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.192:6443: connect: connection refused" logger="UnhandledError"
Jan 26 10:43:06 crc kubenswrapper[5003]: W0126 10:43:06.220080 5003 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.192:6443: connect: connection refused
Jan 26 10:43:06 crc kubenswrapper[5003]: E0126 10:43:06.220163 5003 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.192:6443: connect: connection refused" logger="UnhandledError"
Jan 26 10:43:06 crc kubenswrapper[5003]: W0126 10:43:06.229122 5003 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.192:6443: connect: connection refused
Jan 26 10:43:06 crc kubenswrapper[5003]: E0126 10:43:06.229269 5003 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.192:6443: connect: connection refused" logger="UnhandledError"
Jan 26 10:43:06 crc kubenswrapper[5003]: W0126 10:43:06.285570 5003 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.192:6443: connect: connection refused
Jan 26 10:43:06 crc kubenswrapper[5003]: E0126 10:43:06.285896 5003 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.192:6443: connect: connection refused" logger="UnhandledError"
Jan 26 10:43:06 crc kubenswrapper[5003]: E0126 10:43:06.352147 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.192:6443: connect: connection refused" interval="1.6s"
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.570065 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.571418 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.571459 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.571469 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.571495 5003 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Jan 26 10:43:06 crc kubenswrapper[5003]: E0126 10:43:06.571967 5003 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.192:6443: connect: connection refused" node="crc"
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.937559 5003 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.192:6443: connect: connection refused
Jan 26 10:43:06 crc kubenswrapper[5003]: I0126 10:43:06.947440 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 08:05:26.165626438 +0000 UTC
Jan 26 10:43:07 crc kubenswrapper[5003]: I0126 10:43:07.020525 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"9a3c9c6bc10414e0ee33459ed2d373ebd6153bc455c01fb47ca394b17695acdb"}
Jan 26 10:43:07 crc kubenswrapper[5003]: I0126 10:43:07.020625 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 26 10:43:07 crc kubenswrapper[5003]: I0126 10:43:07.021391 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:43:07 crc kubenswrapper[5003]: I0126 10:43:07.021420 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:43:07 crc kubenswrapper[5003]: I0126 10:43:07.021430 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:43:07 crc kubenswrapper[5003]: I0126 10:43:07.023917 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"e2a532ad0f92d0c2284fcd0c52cac0b5187dacaccccf2fc361fab4173436c235"}
event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"485f8dd99739834234edd418f9fc2fa8a50c854bf08a088fafb8ac6814dca1c9"} Jan 26 10:43:07 crc kubenswrapper[5003]: I0126 10:43:07.023975 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"d73b095e2001015587e06ebb41c1460ef946ddc2dd47f2c893ee82fe6353370b"} Jan 26 10:43:07 crc kubenswrapper[5003]: I0126 10:43:07.024082 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:07 crc kubenswrapper[5003]: I0126 10:43:07.025053 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:07 crc kubenswrapper[5003]: I0126 10:43:07.025077 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:07 crc kubenswrapper[5003]: I0126 10:43:07.025086 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:07 crc kubenswrapper[5003]: I0126 10:43:07.029420 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a"} Jan 26 10:43:07 crc kubenswrapper[5003]: I0126 10:43:07.029462 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74"} Jan 26 10:43:07 crc kubenswrapper[5003]: I0126 10:43:07.029473 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483"} Jan 26 10:43:07 crc kubenswrapper[5003]: I0126 10:43:07.029553 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:07 crc kubenswrapper[5003]: I0126 10:43:07.030501 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:07 crc kubenswrapper[5003]: I0126 10:43:07.030534 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:07 crc kubenswrapper[5003]: I0126 10:43:07.030545 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:07 crc kubenswrapper[5003]: I0126 10:43:07.031507 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9"} Jan 26 10:43:07 crc kubenswrapper[5003]: I0126 10:43:07.031558 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8"} Jan 26 10:43:07 crc kubenswrapper[5003]: I0126 10:43:07.031576 5003 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c"} Jan 26 10:43:07 crc kubenswrapper[5003]: I0126 10:43:07.033010 5003 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f" exitCode=0 Jan 26 10:43:07 crc kubenswrapper[5003]: I0126 10:43:07.033042 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f"} Jan 26 10:43:07 crc kubenswrapper[5003]: I0126 10:43:07.033185 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:07 crc kubenswrapper[5003]: I0126 10:43:07.034257 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:07 crc kubenswrapper[5003]: I0126 10:43:07.034306 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:07 crc kubenswrapper[5003]: I0126 10:43:07.034319 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:07 crc kubenswrapper[5003]: I0126 10:43:07.046153 5003 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 26 10:43:07 crc kubenswrapper[5003]: E0126 10:43:07.047090 5003 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.192:6443: connect: connection refused" logger="UnhandledError" Jan 26 10:43:07 crc kubenswrapper[5003]: I0126 10:43:07.742435 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 26 10:43:07 crc kubenswrapper[5003]: I0126 10:43:07.948121 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 01:23:44.752613271 +0000 UTC Jan 26 10:43:08 crc kubenswrapper[5003]: I0126 10:43:08.039685 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960"} Jan 26 10:43:08 crc kubenswrapper[5003]: I0126 10:43:08.039749 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee"} Jan 26 10:43:08 crc kubenswrapper[5003]: I0126 10:43:08.039856 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:08 crc kubenswrapper[5003]: I0126 10:43:08.041263 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:08 crc kubenswrapper[5003]: I0126 10:43:08.041313 5003 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:08 crc kubenswrapper[5003]: I0126 10:43:08.041322 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:08 crc kubenswrapper[5003]: I0126 10:43:08.042177 5003 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a" exitCode=0 Jan 26 10:43:08 crc kubenswrapper[5003]: I0126 10:43:08.042296 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:08 crc kubenswrapper[5003]: I0126 10:43:08.042267 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a"} Jan 26 10:43:08 crc kubenswrapper[5003]: I0126 10:43:08.042384 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:08 crc kubenswrapper[5003]: I0126 10:43:08.042484 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:08 crc kubenswrapper[5003]: I0126 10:43:08.042992 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:08 crc kubenswrapper[5003]: I0126 10:43:08.043019 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:08 crc kubenswrapper[5003]: I0126 10:43:08.043028 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:08 crc kubenswrapper[5003]: I0126 10:43:08.043401 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:08 crc kubenswrapper[5003]: I0126 10:43:08.043435 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:08 crc kubenswrapper[5003]: I0126 10:43:08.043450 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:08 crc kubenswrapper[5003]: I0126 10:43:08.043731 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:08 crc kubenswrapper[5003]: I0126 10:43:08.043774 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:08 crc kubenswrapper[5003]: I0126 10:43:08.043792 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:08 crc kubenswrapper[5003]: I0126 10:43:08.172963 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:08 crc kubenswrapper[5003]: I0126 10:43:08.174268 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:08 crc kubenswrapper[5003]: I0126 10:43:08.174325 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:08 crc kubenswrapper[5003]: I0126 10:43:08.174334 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 
10:43:08 crc kubenswrapper[5003]: I0126 10:43:08.174359 5003 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 26 10:43:08 crc kubenswrapper[5003]: I0126 10:43:08.948537 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 19:16:21.892003061 +0000 UTC Jan 26 10:43:09 crc kubenswrapper[5003]: I0126 10:43:09.048256 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325"} Jan 26 10:43:09 crc kubenswrapper[5003]: I0126 10:43:09.048340 5003 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 26 10:43:09 crc kubenswrapper[5003]: I0126 10:43:09.048392 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:09 crc kubenswrapper[5003]: I0126 10:43:09.049335 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:09 crc kubenswrapper[5003]: I0126 10:43:09.049376 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:09 crc kubenswrapper[5003]: I0126 10:43:09.049391 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:09 crc kubenswrapper[5003]: I0126 10:43:09.948800 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 08:36:34.407392464 +0000 UTC Jan 26 10:43:10 crc kubenswrapper[5003]: I0126 10:43:10.057214 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a4f1a4543ab9ad1381c2334d23c6703f21c1021ff919fc81da5844f623484d54"} Jan 26 10:43:10 crc kubenswrapper[5003]: I0126 10:43:10.057263 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e"} Jan 26 10:43:10 crc kubenswrapper[5003]: I0126 10:43:10.057292 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f"} Jan 26 10:43:10 crc kubenswrapper[5003]: I0126 10:43:10.057306 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef"} Jan 26 10:43:10 crc kubenswrapper[5003]: I0126 10:43:10.057387 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:10 crc kubenswrapper[5003]: I0126 10:43:10.058385 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:10 crc kubenswrapper[5003]: I0126 10:43:10.058432 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:10 crc kubenswrapper[5003]: I0126 10:43:10.058444 5003 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:10 crc kubenswrapper[5003]: I0126 10:43:10.365842 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 10:43:10 crc kubenswrapper[5003]: I0126 10:43:10.366000 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:10 crc kubenswrapper[5003]: I0126 10:43:10.367352 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:10 crc kubenswrapper[5003]: I0126 10:43:10.367387 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:10 crc kubenswrapper[5003]: I0126 10:43:10.367396 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:10 crc kubenswrapper[5003]: I0126 10:43:10.949201 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 08:35:00.732876545 +0000 UTC Jan 26 10:43:11 crc kubenswrapper[5003]: I0126 10:43:11.060099 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:11 crc kubenswrapper[5003]: I0126 10:43:11.061056 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:11 crc kubenswrapper[5003]: I0126 10:43:11.061090 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:11 crc kubenswrapper[5003]: I0126 10:43:11.061100 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:11 crc kubenswrapper[5003]: I0126 10:43:11.135717 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:43:11 crc kubenswrapper[5003]: I0126 10:43:11.135863 5003 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 26 10:43:11 crc kubenswrapper[5003]: I0126 10:43:11.135901 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:11 crc kubenswrapper[5003]: I0126 10:43:11.136999 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:11 crc kubenswrapper[5003]: I0126 10:43:11.137038 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:11 crc kubenswrapper[5003]: I0126 10:43:11.137050 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:11 crc kubenswrapper[5003]: I0126 10:43:11.208034 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Jan 26 10:43:11 crc kubenswrapper[5003]: I0126 10:43:11.346648 5003 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 26 10:43:11 crc kubenswrapper[5003]: I0126 10:43:11.583387 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:43:11 crc kubenswrapper[5003]: I0126 10:43:11.721487 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Jan 26 
10:43:11 crc kubenswrapper[5003]: I0126 10:43:11.950226 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 23:14:05.86562508 +0000 UTC Jan 26 10:43:11 crc kubenswrapper[5003]: I0126 10:43:11.980710 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 10:43:11 crc kubenswrapper[5003]: I0126 10:43:11.980877 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:11 crc kubenswrapper[5003]: I0126 10:43:11.982072 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:11 crc kubenswrapper[5003]: I0126 10:43:11.982101 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:11 crc kubenswrapper[5003]: I0126 10:43:11.982112 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:12 crc kubenswrapper[5003]: I0126 10:43:12.062355 5003 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 26 10:43:12 crc kubenswrapper[5003]: I0126 10:43:12.062396 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:12 crc kubenswrapper[5003]: I0126 10:43:12.062413 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:12 crc kubenswrapper[5003]: I0126 10:43:12.063633 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:12 crc kubenswrapper[5003]: I0126 10:43:12.063680 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:12 crc kubenswrapper[5003]: I0126 10:43:12.063695 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:12 crc kubenswrapper[5003]: I0126 10:43:12.063679 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:12 crc kubenswrapper[5003]: I0126 10:43:12.063730 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:12 crc kubenswrapper[5003]: I0126 10:43:12.063741 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:12 crc kubenswrapper[5003]: I0126 10:43:12.612382 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:43:12 crc kubenswrapper[5003]: I0126 10:43:12.950917 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-25 17:11:54.557799129 +0000 UTC Jan 26 10:43:13 crc kubenswrapper[5003]: I0126 10:43:13.065716 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:13 crc kubenswrapper[5003]: I0126 10:43:13.065743 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:13 crc kubenswrapper[5003]: I0126 10:43:13.067058 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 26 10:43:13 crc kubenswrapper[5003]: I0126 10:43:13.067070 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:13 crc kubenswrapper[5003]: I0126 10:43:13.067092 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:13 crc kubenswrapper[5003]: I0126 10:43:13.067101 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:13 crc kubenswrapper[5003]: I0126 10:43:13.067116 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:13 crc kubenswrapper[5003]: I0126 10:43:13.067103 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:13 crc kubenswrapper[5003]: I0126 10:43:13.084905 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 10:43:13 crc kubenswrapper[5003]: I0126 10:43:13.085078 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:13 crc kubenswrapper[5003]: I0126 10:43:13.088590 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:13 crc kubenswrapper[5003]: I0126 10:43:13.088649 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:13 crc kubenswrapper[5003]: I0126 10:43:13.088668 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:13 crc kubenswrapper[5003]: I0126 10:43:13.092226 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 10:43:13 crc kubenswrapper[5003]: I0126 10:43:13.951865 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 05:17:09.558309945 +0000 UTC Jan 26 10:43:14 crc kubenswrapper[5003]: I0126 10:43:14.067890 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:14 crc kubenswrapper[5003]: I0126 10:43:14.068933 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:14 crc kubenswrapper[5003]: I0126 10:43:14.068964 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:14 crc kubenswrapper[5003]: I0126 10:43:14.068972 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:14 crc kubenswrapper[5003]: I0126 10:43:14.952444 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 11:08:18.619266859 +0000 UTC Jan 26 10:43:14 crc kubenswrapper[5003]: I0126 10:43:14.981067 5003 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 26 
10:43:14 crc kubenswrapper[5003]: I0126 10:43:14.981154 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 26 10:43:15 crc kubenswrapper[5003]: E0126 10:43:15.066315 5003 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Jan 26 10:43:15 crc kubenswrapper[5003]: I0126 10:43:15.952977 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 08:26:53.307093065 +0000 UTC Jan 26 10:43:16 crc kubenswrapper[5003]: I0126 10:43:16.757778 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 10:43:16 crc kubenswrapper[5003]: I0126 10:43:16.757980 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:16 crc kubenswrapper[5003]: I0126 10:43:16.759572 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:16 crc kubenswrapper[5003]: I0126 10:43:16.759615 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:16 crc kubenswrapper[5003]: I0126 10:43:16.759628 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:16 crc kubenswrapper[5003]: I0126 10:43:16.765003 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 10:43:16 crc kubenswrapper[5003]: I0126 10:43:16.954113 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 23:23:26.234146461 +0000 UTC Jan 26 10:43:17 crc kubenswrapper[5003]: I0126 10:43:17.074785 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:17 crc kubenswrapper[5003]: I0126 10:43:17.076435 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:17 crc kubenswrapper[5003]: I0126 10:43:17.076487 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:17 crc kubenswrapper[5003]: I0126 10:43:17.076500 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:17 crc kubenswrapper[5003]: I0126 10:43:17.937893 5003 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Jan 26 10:43:17 crc kubenswrapper[5003]: E0126 10:43:17.953444 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="3.2s" Jan 26 10:43:17 crc kubenswrapper[5003]: I0126 10:43:17.954505 5003 certificate_manager.go:356] 
Jan 26 10:43:17 crc kubenswrapper[5003]: I0126 10:43:17.954505 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 07:40:51.263432064 +0000 UTC
Jan 26 10:43:18 crc kubenswrapper[5003]: E0126 10:43:18.175592 5003 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake timeout" node="crc"
Jan 26 10:43:18 crc kubenswrapper[5003]: W0126 10:43:18.303534 5003 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout
Jan 26 10:43:18 crc kubenswrapper[5003]: I0126 10:43:18.303866 5003 trace.go:236] Trace[1042686420]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (26-Jan-2026 10:43:08.302) (total time: 10001ms):
Jan 26 10:43:18 crc kubenswrapper[5003]: Trace[1042686420]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (10:43:18.303)
Jan 26 10:43:18 crc kubenswrapper[5003]: Trace[1042686420]: [10.001342144s] [10.001342144s] END
Jan 26 10:43:18 crc kubenswrapper[5003]: E0126 10:43:18.304013 5003 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Jan 26 10:43:18 crc kubenswrapper[5003]: W0126 10:43:18.463689 5003 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout
Jan 26 10:43:18 crc kubenswrapper[5003]: I0126 10:43:18.464392 5003 trace.go:236] Trace[25045803]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (26-Jan-2026 10:43:08.462) (total time: 10001ms):
Jan 26 10:43:18 crc kubenswrapper[5003]: Trace[25045803]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (10:43:18.463)
Jan 26 10:43:18 crc kubenswrapper[5003]: Trace[25045803]: [10.001790596s] [10.001790596s] END
Jan 26 10:43:18 crc kubenswrapper[5003]: E0126 10:43:18.464438 5003 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Jan 26 10:43:18 crc kubenswrapper[5003]: W0126 10:43:18.743749 5003 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout
Jan 26 10:43:18 crc kubenswrapper[5003]: I0126 10:43:18.743832 5003 trace.go:236] Trace[2146279561]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (26-Jan-2026 10:43:08.742) (total time: 10001ms):
Jan 26 10:43:18 crc kubenswrapper[5003]: Trace[2146279561]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (10:43:18.743)
Jan 26 10:43:18 crc kubenswrapper[5003]: Trace[2146279561]: [10.001455507s] [10.001455507s] END
Jan 26 10:43:18 crc kubenswrapper[5003]: E0126 10:43:18.743850 5003 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Jan 26 10:43:18 crc kubenswrapper[5003]: I0126 10:43:18.954595 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 13:32:42.984979971 +0000 UTC
Jan 26 10:43:19 crc kubenswrapper[5003]: I0126 10:43:19.117024 5003 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Jan 26 10:43:19 crc kubenswrapper[5003]: I0126 10:43:19.117356 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Jan 26 10:43:19 crc kubenswrapper[5003]: I0126 10:43:19.122464 5003 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Jan 26 10:43:19 crc kubenswrapper[5003]: [+]log ok
Jan 26 10:43:19 crc kubenswrapper[5003]: [+]etcd ok
Jan 26 10:43:19 crc kubenswrapper[5003]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok
Jan 26 10:43:19 crc kubenswrapper[5003]: [+]poststarthook/openshift.io-api-request-count-filter ok
Jan 26 10:43:19 crc kubenswrapper[5003]: [+]poststarthook/openshift.io-startkubeinformers ok
Jan 26 10:43:19 crc kubenswrapper[5003]: [+]poststarthook/openshift.io-openshift-apiserver-reachable ok
Jan 26 10:43:19 crc kubenswrapper[5003]: [+]poststarthook/openshift.io-oauth-apiserver-reachable ok
Jan 26 10:43:19 crc kubenswrapper[5003]: [+]poststarthook/start-apiserver-admission-initializer ok
Jan 26 10:43:19 crc kubenswrapper[5003]: [+]poststarthook/generic-apiserver-start-informers ok
Jan 26 10:43:19 crc kubenswrapper[5003]: [+]poststarthook/priority-and-fairness-config-consumer ok
Jan 26 10:43:19 crc kubenswrapper[5003]: [+]poststarthook/priority-and-fairness-filter ok
Jan 26 10:43:19 crc kubenswrapper[5003]: [+]poststarthook/storage-object-count-tracker-hook ok
Jan 26 10:43:19 crc kubenswrapper[5003]: [+]poststarthook/start-apiextensions-informers ok
Jan 26 10:43:19 crc kubenswrapper[5003]: [-]poststarthook/start-apiextensions-controllers failed: reason withheld
Jan 26 10:43:19 crc kubenswrapper[5003]: [-]poststarthook/crd-informer-synced failed: reason withheld
Jan 26 10:43:19 crc kubenswrapper[5003]: [+]poststarthook/start-system-namespaces-controller ok
Jan 26 10:43:19 crc kubenswrapper[5003]: [+]poststarthook/start-cluster-authentication-info-controller ok
Jan 26 10:43:19 crc kubenswrapper[5003]: [+]poststarthook/start-kube-apiserver-identity-lease-controller ok Jan 26 10:43:19 crc kubenswrapper[5003]: [+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok Jan 26 10:43:19 crc kubenswrapper[5003]: [+]poststarthook/start-legacy-token-tracking-controller ok Jan 26 10:43:19 crc kubenswrapper[5003]: [-]poststarthook/start-service-ip-repair-controllers failed: reason withheld Jan 26 10:43:19 crc kubenswrapper[5003]: [-]poststarthook/rbac/bootstrap-roles failed: reason withheld Jan 26 10:43:19 crc kubenswrapper[5003]: [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld Jan 26 10:43:19 crc kubenswrapper[5003]: [-]poststarthook/priority-and-fairness-config-producer failed: reason withheld Jan 26 10:43:19 crc kubenswrapper[5003]: [-]poststarthook/bootstrap-controller failed: reason withheld Jan 26 10:43:19 crc kubenswrapper[5003]: [+]poststarthook/aggregator-reload-proxy-client-cert ok Jan 26 10:43:19 crc kubenswrapper[5003]: [+]poststarthook/start-kube-aggregator-informers ok Jan 26 10:43:19 crc kubenswrapper[5003]: [+]poststarthook/apiservice-status-local-available-controller ok Jan 26 10:43:19 crc kubenswrapper[5003]: [+]poststarthook/apiservice-status-remote-available-controller ok Jan 26 10:43:19 crc kubenswrapper[5003]: [-]poststarthook/apiservice-registration-controller failed: reason withheld Jan 26 10:43:19 crc kubenswrapper[5003]: [+]poststarthook/apiservice-wait-for-first-sync ok Jan 26 10:43:19 crc kubenswrapper[5003]: [-]poststarthook/apiservice-discovery-controller failed: reason withheld Jan 26 10:43:19 crc kubenswrapper[5003]: [+]poststarthook/kube-apiserver-autoregistration ok Jan 26 10:43:19 crc kubenswrapper[5003]: [-]autoregister-completion failed: reason withheld Jan 26 10:43:19 crc kubenswrapper[5003]: [+]poststarthook/apiservice-openapi-controller ok Jan 26 10:43:19 crc kubenswrapper[5003]: [+]poststarthook/apiservice-openapiv3-controller ok Jan 26 10:43:19 crc kubenswrapper[5003]: livez check failed Jan 26 10:43:19 crc kubenswrapper[5003]: I0126 10:43:19.122526 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 10:43:19 crc kubenswrapper[5003]: I0126 10:43:19.954855 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 09:23:13.048899784 +0000 UTC Jan 26 10:43:20 crc kubenswrapper[5003]: I0126 10:43:20.956068 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 21:07:09.191650527 +0000 UTC Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.235004 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.235185 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.236345 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.236385 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:21 crc 
kubenswrapper[5003]: I0126 10:43:21.236399 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.248664 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc"
Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.376629 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.378266 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.378544 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.378655 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.378688 5003 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Jan 26 10:43:21 crc kubenswrapper[5003]: E0126 10:43:21.384246 5003 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.587816 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.588349 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.589914 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.589971 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.589982 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.593465 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.687907 5003 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.937840 5003 apiserver.go:52] "Watching apiserver"
Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.942642 5003 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.943144 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"]
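The two startup-probe failures above are distinct: the 403 is the prober being rejected as system:anonymous on /livez, while the 500 carries the /livez?verbose body in which each [+] line is a passing check and each [-] line a poststarthook that has not finished ("reason withheld"). A minimal stdlib-Python sketch (a hypothetical helper, not kubelet code) for pulling the failing check names out of a captured body like the one logged at 10:43:19:

    import re

    # Probe-body lines look like "[+]ping ok" or
    # "[-]poststarthook/crd-informer-synced failed: reason withheld".
    CHECK = re.compile(r"^\[([+-])\](\S+)")

    def failing_checks(probe_body: str) -> list[str]:
        # Names of the checks marked [-] in a /livez?verbose body.
        return [m.group(2)
                for line in probe_body.splitlines()
                if (m := CHECK.match(line.strip())) and m.group(1) == "-"]

Fed the body above, it would return the hooks still marked [-] (start-apiextensions-controllers, crd-informer-synced, the bootstrap and service-ip-repair hooks, the apiservice registration/discovery controllers, autoregister-completion), which is why the probe reported 500 while etcd and the informer hooks were already ok.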
Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.943592 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.943783 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 26 10:43:21 crc kubenswrapper[5003]: E0126 10:43:21.943871 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.943955 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 26 10:43:21 crc kubenswrapper[5003]: E0126 10:43:21.944008 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.944187 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.944215 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.944224 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 26 10:43:21 crc kubenswrapper[5003]: E0126 10:43:21.944527 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.947161 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.947249 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.947619 5003 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.947877 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.947987 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.948073 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.949371 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.949400 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.949592 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.954632 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.956540 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 16:02:31.391941516 +0000 UTC Jan 26 10:43:21 crc kubenswrapper[5003]: I0126 10:43:21.985738 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 10:43:22 crc kubenswrapper[5003]: I0126 10:43:22.005628 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 10:43:22 crc kubenswrapper[5003]: I0126 10:43:22.017205 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 10:43:22 crc kubenswrapper[5003]: I0126 10:43:22.032523 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 10:43:22 crc kubenswrapper[5003]: I0126 10:43:22.050474 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 10:43:22 crc kubenswrapper[5003]: I0126 10:43:22.064082 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 10:43:22 crc kubenswrapper[5003]: I0126 10:43:22.078669 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 26 10:43:22 crc kubenswrapper[5003]: I0126 10:43:22.113149 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Jan 26 10:43:22 crc kubenswrapper[5003]: I0126 10:43:22.113571 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"]
Jan 26 10:43:22 crc kubenswrapper[5003]: I0126 10:43:22.957345 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 20:45:30.072873931 +0000 UTC
Jan 26 10:43:23 crc kubenswrapper[5003]: I0126 10:43:23.091386 5003 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Jan 26 10:43:23 crc kubenswrapper[5003]: I0126 10:43:23.285852 5003 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Jan 26 10:43:23 crc kubenswrapper[5003]: I0126 10:43:23.513538 5003 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Jan 26 10:43:23 crc kubenswrapper[5003]: I0126 10:43:23.958312 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 09:05:14.166081103 +0000 UTC
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.000910 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.000935 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.000990 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
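Every status-patch failure in this stretch dies the same way: the apiserver's POST to the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743/pod comes back "connection refused", consistent with the network-node-identity webhook container itself still being in ContainerCreating. Refused means nothing is listening on the port, a different failure mode from the TLS handshake timeout at 10:43:18. A minimal sketch (stdlib Python, assumed to run on the node itself; host and port taken from the logged URL) to tell the cases apart:

    import socket

    def probe(host: str = "127.0.0.1", port: int = 9743, timeout: float = 3.0) -> str:
        # Refused = reachable host, no listener (the case in the log above);
        # a timeout would instead suggest a hung process or a filtered port.
        try:
            with socket.create_connection((host, port), timeout=timeout):
                return "listening"
        except ConnectionRefusedError:
            return "connection refused (no listener)"
        except OSError as exc:  # timeouts, unreachable host, etc.
            return f"no connection: {exc}"

    print(probe())

Once the webhook container comes up, the same probe should report "listening" and these patch retries stop failing on their own.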
Jan 26 10:43:24 crc kubenswrapper[5003]: E0126 10:43:24.001062 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 26 10:43:24 crc kubenswrapper[5003]: E0126 10:43:24.001174 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 26 10:43:24 crc kubenswrapper[5003]: E0126 10:43:24.001260 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.111868 5003 trace.go:236] Trace[1240299670]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (26-Jan-2026 10:43:09.220) (total time: 14891ms):
Jan 26 10:43:24 crc kubenswrapper[5003]: Trace[1240299670]: ---"Objects listed" error: 14891ms (10:43:24.111)
Jan 26 10:43:24 crc kubenswrapper[5003]: Trace[1240299670]: [14.891068403s] [14.891068403s] END
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.111911 5003 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.129939 5003 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.134820 5003 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.155584 5003 csr.go:261] certificate signing request csr-gwpjk is approved, waiting to be issued
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.159379 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.170197 5003 csr.go:257] certificate signing request csr-gwpjk is issued
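The Trace block above is the client-side cost of the earlier TLS handshake timeouts against api-int.crc.testing:6443: this Reflector ListAndWatch took 14.891s, essentially all of it in "Objects listed". A small sketch (hypothetical helper, stdlib only) for sweeping a saved journal for trace summaries above a threshold:

    import re
    import sys

    # Matches summaries like:
    #   Trace[1240299670]: "Reflector ListAndWatch" ... (total time: 14891ms):
    TRACE = re.compile(r'Trace\[(\d+)\]: "([^"]+)".*?\(total time: (\d+)ms\)')

    def slow_traces(text: str, threshold_ms: int = 1000):
        for m in TRACE.finditer(text):
            if int(m.group(3)) >= threshold_ms:
                yield m.group(1), m.group(2), int(m.group(3))

    for tid, op, ms in slow_traces(sys.stdin.read()):
        print(f"Trace[{tid}] {op}: {ms}ms")

On this boot it would flag Trace[1240299670] above, and presumably the 10s service-list trace that timed out at 10:43:18 as well, assuming its summary line (cut off earlier) has the same shape.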
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.186180 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.198918 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.201214 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.207385 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.208659 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.213640 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.226445 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.230391 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.230596 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.230708 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.230810 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.230909 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.231023 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.231149 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.231259 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.231379 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.231494 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.230901 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.231606 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.230966 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.231172 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.231600 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.231709 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.231735 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.231753 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.231768 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.231785 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.231803 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.231825 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.231847 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.231868 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod 
\"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.231889 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.231917 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.231941 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.231963 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.231985 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232009 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232034 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232108 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232134 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232196 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232219 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232276 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232327 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232431 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232461 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232488 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232511 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232538 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232563 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232585 5003 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232607 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232634 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232658 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232691 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232713 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232734 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.231861 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232748 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232051 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232067 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232791 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232816 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232842 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232867 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232893 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232917 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232941 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 26 
10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233005 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233024 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233046 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233071 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233093 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233119 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233141 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233162 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233183 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233206 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 26 10:43:24 crc kubenswrapper[5003]: 
I0126 10:43:24.233228 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233249 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233271 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233309 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233331 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233356 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233376 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233399 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233422 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233445 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233467 5003 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233488 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233512 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233537 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233559 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233579 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233600 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233623 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233643 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233662 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 26 10:43:24 crc kubenswrapper[5003]: 
I0126 10:43:24.233683 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233703 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233725 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233745 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232775 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233765 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232078 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232128 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232141 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232351 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232384 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232378 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232405 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232586 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233838 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233848 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232615 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). 
InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232701 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232786 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.232848 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233007 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233924 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233039 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233035 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233119 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233211 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233454 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233476 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233523 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233700 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233741 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.234079 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.234149 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.234385 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.234453 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.234545 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.234546 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.234798 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.235351 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.236784 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.236927 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.237174 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.237581 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.237761 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.237770 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.237879 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.238147 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.238233 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.238379 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.238541 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.238679 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.239038 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.239401 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.239495 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.239660 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.240107 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.240447 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.240507 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.240579 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.240959 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.240976 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.233788 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241026 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241054 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241073 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241092 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241108 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241124 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241140 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241197 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: 
\"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241213 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241229 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241247 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241254 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241264 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241320 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241358 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241374 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241392 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241415 5003 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241437 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241460 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241483 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241510 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241534 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241555 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241578 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241603 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241627 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 26 
10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241647 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241670 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241694 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241719 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241741 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241755 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241878 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.241981 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.242428 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.242576 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.242765 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.242822 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.243152 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). 
InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.243567 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.243733 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.243918 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.244078 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.244477 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.244692 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.244475 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.244780 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.244837 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.245217 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.245615 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.245716 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.246136 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.246611 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.246723 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.246758 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.246805 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.246824 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.246866 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.246895 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.246917 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.246946 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.246973 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.246998 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.247241 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod 
\"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.246916 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.247622 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.248136 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.250331 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.250724 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.250742 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.250785 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.251000 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.251159 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.251412 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.251463 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.251590 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.251603 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.251662 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.251704 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.251742 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.251781 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.251813 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.251848 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.251887 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.251924 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.251970 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.252003 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") 
pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.252035 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.252065 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.252096 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.252131 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.252160 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.252191 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.252221 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.252395 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.253558 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.253618 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.253650 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.253685 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.253718 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.253750 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.253782 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.253814 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.253843 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.253872 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.253903 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.253948 5003 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.253979 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.254008 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.254042 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.254072 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.254102 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.254135 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.254163 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.254192 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.254220 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.254250 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.254300 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.254339 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.254373 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.254403 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.254434 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.254507 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.254543 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.254581 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.254615 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.255455 5003 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.255538 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.255569 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.255606 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.255635 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.255691 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.255720 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.255756 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.255789 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.255828 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: 
\"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.255865 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.255923 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.255898 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.256131 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.256165 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.256201 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.256308 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.251853 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.252061 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.252296 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.252521 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.256647 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.258341 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.258703 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.258914 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.258985 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.259094 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.259396 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.259728 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.260049 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.253013 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: E0126 10:43:24.259108 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:43:24.759080602 +0000 UTC m=+20.300306173 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.253214 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.261201 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.253685 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.253702 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.253982 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.254252 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.254506 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.255785 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.255797 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.256241 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.256330 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.262581 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.262585 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.262958 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.263226 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.263109 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.263507 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.263581 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.263772 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.264022 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.264338 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.252774 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.264712 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.265209 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.265395 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.265717 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.265853 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.266011 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.266055 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.266376 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.266511 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.266717 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.267031 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.267063 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.267099 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.267179 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.267231 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.267313 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.267800 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.268443 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.268752 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.268868 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.268922 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.268953 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.268996 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.269017 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.269043 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 26 10:43:24 crc kubenswrapper[5003]: E0126 10:43:24.269059 5003 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Jan 26 10:43:24 crc kubenswrapper[5003]: E0126 10:43:24.269142 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 10:43:24.769114758 +0000 UTC m=+20.310340329 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Jan 26 10:43:24 crc kubenswrapper[5003]: E0126 10:43:24.269217 5003 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.269312 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.269345 5003 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.269362 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.269472 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: E0126 10:43:24.269519 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 10:43:24.769414066 +0000 UTC m=+20.310639637 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.269545 5003 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.269561 5003 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.269583 5003 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.269598 5003 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.269614 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.269628 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.269648 5003 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.269667 5003 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.269681 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270100 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270133 5003 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270154 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270177 5003 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270194 5003 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270209 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270227 5003 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270296 5003 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270311 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270328 5003 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270347 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270383 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270398 5003 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270413 5003 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270431 5003 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270422 5003 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory"
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270445 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270460 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270474 5003 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270492 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270505 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270531 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270546 5003 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270563 5003 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270576 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270589 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270615 5003 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270636 5003 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270656 5003 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270671 5003 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270688 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270702 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270715 5003 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270728 5003 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270745 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270757 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270770 5003 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270786 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270814 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270830 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270848 5003 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270870 5003 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271034 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271056 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271075 5003 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271102 5003 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271120 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271138 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.270908 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271156 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271308 5003 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271333 5003 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271354 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271381 5003 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271404 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271422 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271445 5003 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271464 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271481 5003 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271498 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271521 5003 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271538 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271556 5003 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271578 5003 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271599 5003 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271616 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271634 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271650 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271672 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271689 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271705 5003 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271728 5003 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271745 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271762 5003 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271779 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271800 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271815 5003 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271833 5003 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271850 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271872 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271892 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271910 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271931 5003 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271947 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271965 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.271983 5003 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272006 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272023 5003 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272041 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272058 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272085 5003 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272104 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272121 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272140 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272162 5003 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272183 5003 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272202 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272225 5003 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272241 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272257 5003 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272272 5003 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272318 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272334 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272351 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272367 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272388 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272404 5003 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272421 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272438 5003 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272459 5003 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272474 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272489 5003 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272514 5003 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272530 5003 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272545 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272562 5003 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272581 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272597 5003 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272612 5003 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272628 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272647 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272663 5003 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272682 5003 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272704 5003 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272720 5003 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272738 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272756 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272783 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272800 5003 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272817 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272833 5003 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272854 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272870 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.272885 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.281505 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.281601 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: E0126 10:43:24.285395 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 26 10:43:24 crc kubenswrapper[5003]: E0126 10:43:24.285429 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 26 10:43:24 crc kubenswrapper[5003]: E0126 10:43:24.285449 5003 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 26 10:43:24 crc kubenswrapper[5003]: E0126 10:43:24.285520 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-26 10:43:24.785499424 +0000 UTC m=+20.326725185 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.287063 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.287114 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.287511 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.290760 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.290929 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.296448 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 10:43:24 crc kubenswrapper[5003]: E0126 10:43:24.297573 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 26 10:43:24 crc kubenswrapper[5003]: E0126 10:43:24.297601 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 26 10:43:24 crc kubenswrapper[5003]: E0126 10:43:24.297614 5003 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 26 10:43:24 crc kubenswrapper[5003]: E0126 10:43:24.297667 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-26 10:43:24.79764903 +0000 UTC m=+20.338874591 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.297793 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021
ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.298021 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.298254 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.298562 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.299024 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.299222 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.299234 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.299445 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.299480 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.302358 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.305577 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.307412 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.307644 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.307925 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.307979 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.308244 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.308595 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.308849 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.308992 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.309090 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.308789 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.309886 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.310815 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.313478 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.314371 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.314462 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.315786 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.315969 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.316235 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-
operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.316417 5003 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.316544 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.316828 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.318136 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.319048 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.319104 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.319752 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.321996 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.322470 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.322824 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.322821 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.322919 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.322983 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.323514 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.323095 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.326106 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.330625 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resou
rces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.340487 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.340742 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.343213 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.348604 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.355171 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.372431 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\
":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri
-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374143 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374191 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374223 5003 reconciler_common.go:293] "Volume detached for 
volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374233 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374242 5003 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374251 5003 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374272 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374302 5003 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374312 5003 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374352 5003 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374363 5003 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374374 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374383 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374392 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374401 5003 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374410 5003 reconciler_common.go:293] "Volume detached for 
volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374421 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374432 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374443 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374452 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374463 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374427 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374476 5003 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374500 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374530 5003 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374592 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374610 5003 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374623 
5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374660 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374677 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374690 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374704 5003 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374714 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374757 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374769 5003 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374781 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374793 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374836 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374851 5003 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374862 5003 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374874 5003 reconciler_common.go:293] "Volume detached for 
volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374914 5003 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374928 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374939 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374951 5003 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.374963 5003 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.375001 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.375009 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.375018 5003 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.375028 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.375037 5003 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.375046 5003 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.375078 5003 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.378413 5003 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.387632 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"
name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 10:43:24 crc kubenswrapper[5003]: W0126 10:43:24.400559 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-155059c11c4f4d9414c19824132c205b933d1b7d8ad4984c68cfea87f14df1d9 WatchSource:0}: Error finding container 155059c11c4f4d9414c19824132c205b933d1b7d8ad4984c68cfea87f14df1d9: Status 404 returned error can't find the container with id 
155059c11c4f4d9414c19824132c205b933d1b7d8ad4984c68cfea87f14df1d9 Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.400835 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.423197 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.438421 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.453049 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.659245 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.671264 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 26 10:43:24 crc kubenswrapper[5003]: W0126 10:43:24.685963 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-7dc6d66380437069b7a6d6bcfc9d82f775beedf9a7ccd4686ff1e2d3b6bf680f WatchSource:0}: Error finding container 7dc6d66380437069b7a6d6bcfc9d82f775beedf9a7ccd4686ff1e2d3b6bf680f: Status 404 returned error can't find the container with id 7dc6d66380437069b7a6d6bcfc9d82f775beedf9a7ccd4686ff1e2d3b6bf680f Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.778385 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:43:24 crc kubenswrapper[5003]: E0126 10:43:24.778562 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:43:25.778537388 +0000 UTC m=+21.319762949 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.778699 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.778730 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:43:24 crc kubenswrapper[5003]: E0126 10:43:24.778785 5003 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 10:43:24 crc kubenswrapper[5003]: E0126 10:43:24.778826 5003 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 10:43:24 crc kubenswrapper[5003]: E0126 10:43:24.778834 5003 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 10:43:25.778822046 +0000 UTC m=+21.320047607 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 10:43:24 crc kubenswrapper[5003]: E0126 10:43:24.778870 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 10:43:25.778856817 +0000 UTC m=+21.320082378 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.849860 5003 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Jan 26 10:43:24 crc kubenswrapper[5003]: W0126 10:43:24.850724 5003 reflector.go:484] k8s.io/client-go/informers/factory.go:160: watch of *v1.RuntimeClass ended with: very short watch: k8s.io/client-go/informers/factory.go:160: Unexpected watch close - watch lasted less than a second and no items received Jan 26 10:43:24 crc kubenswrapper[5003]: E0126 10:43:24.850835 5003 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-etcd/events\": read tcp 38.102.83.192:37176->38.102.83.192:6443: use of closed network connection" event="&Event{ObjectMeta:{etcd-crc.188e41ecce761de3 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcdctl},},Reason:Started,Message:Started container etcdctl,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-26 10:43:08.935118307 +0000 UTC m=+4.476343868,LastTimestamp:2026-01-26 10:43:08.935118307 +0000 UTC m=+4.476343868,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.879971 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.880026 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: 
\"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:43:24 crc kubenswrapper[5003]: E0126 10:43:24.880165 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 10:43:24 crc kubenswrapper[5003]: E0126 10:43:24.880192 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 10:43:24 crc kubenswrapper[5003]: E0126 10:43:24.880206 5003 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 10:43:24 crc kubenswrapper[5003]: E0126 10:43:24.880248 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-26 10:43:25.880235973 +0000 UTC m=+21.421461534 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 10:43:24 crc kubenswrapper[5003]: E0126 10:43:24.880325 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 10:43:24 crc kubenswrapper[5003]: E0126 10:43:24.880339 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 10:43:24 crc kubenswrapper[5003]: E0126 10:43:24.880348 5003 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 10:43:24 crc kubenswrapper[5003]: E0126 10:43:24.880376 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-26 10:43:25.880369897 +0000 UTC m=+21.421595458 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 10:43:24 crc kubenswrapper[5003]: I0126 10:43:24.958785 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 02:46:17.627945502 +0000 UTC Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.005042 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.005958 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.008165 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.009358 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.011074 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.012267 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.013394 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.015018 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.015925 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.017783 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.017926 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.018757 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.020402 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.021056 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.022046 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.027126 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.027868 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" 
path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.028798 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.029411 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.030207 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.030983 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.031731 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.032581 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.033165 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.035338 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.036232 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.037076 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.037960 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.038668 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.039647 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.039675 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.040754 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.041424 5003 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.041598 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.044048 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.044533 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.045668 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.047120 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.047893 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.049030 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.049838 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.054442 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.055083 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.055902 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.056301 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be 
located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.057176 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.058559 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.059153 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.060192 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.060954 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.062793 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.063389 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.064793 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.066236 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.067681 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 
10:43:25.069121 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.069745 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.080289 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928
e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"q
uay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.093011 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.095774 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2"} Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.095825 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84"} Jan 26 10:43:25 crc 
kubenswrapper[5003]: I0126 10:43:25.095840 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"155059c11c4f4d9414c19824132c205b933d1b7d8ad4984c68cfea87f14df1d9"}
Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.096601 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"7dc6d66380437069b7a6d6bcfc9d82f775beedf9a7ccd4686ff1e2d3b6bf680f"}
Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.098352 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95"}
Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.098376 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"d747eb6201ec9d32315c72283a71b23c67cf3d5bf2676b7cb425d2a5036fb2be"}
Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.111173 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-x7zz5"]
Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.113151 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-x7zz5"
Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.113313 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted.
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.119095 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.119490 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.122834 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.137940 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.155692 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.169550 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"starte
d\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.170981 5003 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-01-26 10:38:24 +0000 UTC, rotation deadline is 2026-10-15 21:44:14.481651456 +0000 UTC Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.171036 5003 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 6299h0m49.310618246s for next certificate rotation Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.179900 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.181648 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6t4tc\" (UniqueName: \"kubernetes.io/projected/6bd79f7d-ecd0-490f-af95-e995c1f3c052-kube-api-access-6t4tc\") pod \"node-resolver-x7zz5\" (UID: \"6bd79f7d-ecd0-490f-af95-e995c1f3c052\") " pod="openshift-dns/node-resolver-x7zz5" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.181693 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/6bd79f7d-ecd0-490f-af95-e995c1f3c052-hosts-file\") pod \"node-resolver-x7zz5\" (UID: \"6bd79f7d-ecd0-490f-af95-e995c1f3c052\") " pod="openshift-dns/node-resolver-x7zz5" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.190388 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.200620 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.244957 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.265989 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z"
Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.282473 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6t4tc\" (UniqueName: \"kubernetes.io/projected/6bd79f7d-ecd0-490f-af95-e995c1f3c052-kube-api-access-6t4tc\") pod \"node-resolver-x7zz5\" (UID: \"6bd79f7d-ecd0-490f-af95-e995c1f3c052\") " pod="openshift-dns/node-resolver-x7zz5"
Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.282725 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/6bd79f7d-ecd0-490f-af95-e995c1f3c052-hosts-file\") pod \"node-resolver-x7zz5\" (UID: \"6bd79f7d-ecd0-490f-af95-e995c1f3c052\") " pod="openshift-dns/node-resolver-x7zz5"
Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.282829 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/6bd79f7d-ecd0-490f-af95-e995c1f3c052-hosts-file\") pod \"node-resolver-x7zz5\" (UID: \"6bd79f7d-ecd0-490f-af95-e995c1f3c052\") " pod="openshift-dns/node-resolver-x7zz5"
Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.295547 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted.
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.306762 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6t4tc\" (UniqueName: \"kubernetes.io/projected/6bd79f7d-ecd0-490f-af95-e995c1f3c052-kube-api-access-6t4tc\") pod \"node-resolver-x7zz5\" (UID: \"6bd79f7d-ecd0-490f-af95-e995c1f3c052\") " pod="openshift-dns/node-resolver-x7zz5" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.324176 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
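
The containerStatuses inside these failed patches all carry a synthesized lastState.terminated with exitCode 137, reason ContainerStatusUnknown, and the message "The container could not be located when the pod was deleted. The container used to be Running": after the VM was restored, the containers that were running before no longer exist, so the kubelet records a terminal state for them. The 137 follows the common 128+signal convention, i.e. SIGKILL. A small decoding sketch (the helper name is made up for illustration):

    package main

    import (
        "fmt"
        "syscall"
    )

    // describeExit decodes an exit code the way 137 is used above:
    // values above 128 conventionally mean "killed by signal code-128".
    func describeExit(code int) string {
        if code > 128 {
            sig := syscall.Signal(code - 128)
            return fmt.Sprintf("terminated by signal %d (%v)", code-128, sig)
        }
        return fmt.Sprintf("exited with status %d", code)
    }

    func main() {
        fmt.Println(137, "=>", describeExit(137)) // terminated by signal 9 (killed)
    }
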
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.352653 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"r
estartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\
\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.372154 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.385370 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.429774 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-x7zz5" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.516763 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-m84kp"] Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.517076 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-x9nkb"] Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.517266 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.517874 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-vpb6l"] Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.517988 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.518148 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.519821 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.520200 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.520380 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.520754 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.521389 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.521524 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.521560 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.521592 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.521714 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-q24zl"] Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.522378 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.522661 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.523061 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.523177 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.524676 5003 util.go:30] "No sandbox for pod can be found. 
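
The "SyncLoop ADD" and "Caches populated" entries around this point show the kubelet picking up the node daemon pods (machine-config-daemon, multus, ovnkube-node) from its API watch and starting reflectors for every ConfigMap and Secret those pods reference; container creation waits until these caches are warm. A rough client-go sketch of the same watch-and-cache pattern follows; the kubeconfig handling and namespace are assumptions, and the kubelet itself uses dedicated per-object reflectors rather than a shared informer factory.

    package main

    import (
        "fmt"
        "time"

        corev1 "k8s.io/api/core/v1"
        "k8s.io/client-go/informers"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/cache"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
        if err != nil {
            panic(err)
        }
        cs, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            panic(err)
        }
        // Watch ConfigMaps in a single namespace, analogous to the kubelet
        // caching objects per pod namespace before starting containers.
        factory := informers.NewSharedInformerFactoryWithOptions(
            cs, 10*time.Minute, informers.WithNamespace("openshift-multus"))
        inf := factory.Core().V1().ConfigMaps().Informer()
        inf.AddEventHandler(cache.ResourceEventHandlerFuncs{
            AddFunc: func(obj interface{}) {
                cm := obj.(*corev1.ConfigMap)
                fmt.Println("cache populated for ConfigMap", cm.Namespace+"/"+cm.Name)
            },
        })
        stop := make(chan struct{})
        factory.Start(stop)
        cache.WaitForCacheSync(stop, inf.HasSynced)
        select {}
    }
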
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.529457 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.529673 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.529952 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.530172 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.530346 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.530557 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.530757 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.540603 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.554122 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.570053 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.582811 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
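
The patch bodies in these errors are hard to read because the journal shows the kubelet's quoted JSON with its quoting escaped a further time; keys such as $setElementOrder/conditions mark them as strategic-merge patches over the pod's status.conditions list. A small sketch for unescaping and pretty-printing one of them; the input literal is a shortened stand-in for a real patch copied from a line above, and the exact number of escaping levels depends on how the journal was captured.

    package main

    import (
        "bytes"
        "encoding/json"
        "fmt"
        "log"
        "strings"
    )

    func main() {
        // Shortened stand-in for a patch body copied out of the journal,
        // still carrying one level of backslash escaping.
        raw := `{\"metadata\":{\"uid\":\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\"},` +
            `\"status\":{\"$setElementOrder/conditions\":[{\"type\":\"Ready\"}]}}`
        unescaped := strings.ReplaceAll(raw, `\"`, `"`)
        var buf bytes.Buffer
        if err := json.Indent(&buf, []byte(unescaped), "", "  "); err != nil {
            log.Fatal(err)
        }
        fmt.Println(buf.String())
    }
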
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.586215 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a045c900-bcf1-42b6-894a-10c38739cf92-tuning-conf-dir\") pod \"multus-additional-cni-plugins-x9nkb\" (UID: \"a045c900-bcf1-42b6-894a-10c38739cf92\") " pod="openshift-multus/multus-additional-cni-plugins-x9nkb" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.586321 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-cnibin\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.586345 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-run-openvswitch\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.586366 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-host-run-k8s-cni-cncf-io\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.586429 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kk74\" (UniqueName: \"kubernetes.io/projected/9a2a5d08-c449-45c6-8e1f-340c076422db-kube-api-access-6kk74\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.586449 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a045c900-bcf1-42b6-894a-10c38739cf92-cnibin\") pod \"multus-additional-cni-plugins-x9nkb\" (UID: \"a045c900-bcf1-42b6-894a-10c38739cf92\") " pod="openshift-multus/multus-additional-cni-plugins-x9nkb" Jan 26 10:43:25 crc 
kubenswrapper[5003]: I0126 10:43:25.586469 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a045c900-bcf1-42b6-894a-10c38739cf92-cni-binary-copy\") pod \"multus-additional-cni-plugins-x9nkb\" (UID: \"a045c900-bcf1-42b6-894a-10c38739cf92\") " pod="openshift-multus/multus-additional-cni-plugins-x9nkb" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.586487 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-run-netns\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.586553 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f9a98683-f9ac-45d4-9312-43ebf25bdb52-ovn-node-metrics-cert\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.586576 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/9a2a5d08-c449-45c6-8e1f-340c076422db-cni-binary-copy\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.586623 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-cni-bin\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.586690 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-multus-cni-dir\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.586712 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-host-var-lib-kubelet\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.586756 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-host-run-netns\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.586778 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-slash\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.586798 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-run-ovn-kubernetes\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.586834 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f9a98683-f9ac-45d4-9312-43ebf25bdb52-ovnkube-config\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.586856 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd-proxy-tls\") pod \"machine-config-daemon-m84kp\" (UID: \"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\") " pod="openshift-machine-config-operator/machine-config-daemon-m84kp" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.586983 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-os-release\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.587005 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-hostroot\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.587027 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a045c900-bcf1-42b6-894a-10c38739cf92-os-release\") pod \"multus-additional-cni-plugins-x9nkb\" (UID: \"a045c900-bcf1-42b6-894a-10c38739cf92\") " pod="openshift-multus/multus-additional-cni-plugins-x9nkb" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.587077 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-multus-conf-dir\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.587097 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-run-systemd\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.587152 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-run-ovn\") pod \"ovnkube-node-q24zl\" (UID: 
\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.587175 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-system-cni-dir\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.587221 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-etc-kubernetes\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.587244 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gmzb\" (UniqueName: \"kubernetes.io/projected/c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd-kube-api-access-5gmzb\") pod \"machine-config-daemon-m84kp\" (UID: \"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\") " pod="openshift-machine-config-operator/machine-config-daemon-m84kp" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.587316 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xp458\" (UniqueName: \"kubernetes.io/projected/f9a98683-f9ac-45d4-9312-43ebf25bdb52-kube-api-access-xp458\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.587353 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-host-run-multus-certs\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.587374 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-node-log\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.587394 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-host-var-lib-cni-bin\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.587414 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-log-socket\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.587433 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/f9a98683-f9ac-45d4-9312-43ebf25bdb52-env-overrides\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.587453 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-cni-netd\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.587474 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77fgj\" (UniqueName: \"kubernetes.io/projected/a045c900-bcf1-42b6-894a-10c38739cf92-kube-api-access-77fgj\") pod \"multus-additional-cni-plugins-x9nkb\" (UID: \"a045c900-bcf1-42b6-894a-10c38739cf92\") " pod="openshift-multus/multus-additional-cni-plugins-x9nkb" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.587495 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-multus-socket-dir-parent\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.587517 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-host-var-lib-cni-multus\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.587536 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd-rootfs\") pod \"machine-config-daemon-m84kp\" (UID: \"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\") " pod="openshift-machine-config-operator/machine-config-daemon-m84kp" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.587556 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a045c900-bcf1-42b6-894a-10c38739cf92-system-cni-dir\") pod \"multus-additional-cni-plugins-x9nkb\" (UID: \"a045c900-bcf1-42b6-894a-10c38739cf92\") " pod="openshift-multus/multus-additional-cni-plugins-x9nkb" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.587584 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f9a98683-f9ac-45d4-9312-43ebf25bdb52-ovnkube-script-lib\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.587606 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/a045c900-bcf1-42b6-894a-10c38739cf92-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-x9nkb\" (UID: \"a045c900-bcf1-42b6-894a-10c38739cf92\") " pod="openshift-multus/multus-additional-cni-plugins-x9nkb" Jan 
26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.587625 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-kubelet\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.587645 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-systemd-units\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.587668 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-etc-openvswitch\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.587694 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.587717 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd-mcd-auth-proxy-config\") pod \"machine-config-daemon-m84kp\" (UID: \"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\") " pod="openshift-machine-config-operator/machine-config-daemon-m84kp" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.587740 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/9a2a5d08-c449-45c6-8e1f-340c076422db-multus-daemon-config\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.587760 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-var-lib-openvswitch\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.598354 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.617585 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.638466 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021
ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.654119 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fd
e41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"
name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.667799 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.682004 5003 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.688682 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-multus-conf-dir\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.688730 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-run-systemd\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.688755 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-run-ovn\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.688781 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: 
\"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-system-cni-dir\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.688802 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-etc-kubernetes\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.688826 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gmzb\" (UniqueName: \"kubernetes.io/projected/c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd-kube-api-access-5gmzb\") pod \"machine-config-daemon-m84kp\" (UID: \"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\") " pod="openshift-machine-config-operator/machine-config-daemon-m84kp" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.688857 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xp458\" (UniqueName: \"kubernetes.io/projected/f9a98683-f9ac-45d4-9312-43ebf25bdb52-kube-api-access-xp458\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.688882 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-node-log\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.688904 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-host-run-multus-certs\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.688934 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f9a98683-f9ac-45d4-9312-43ebf25bdb52-env-overrides\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.688961 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-host-var-lib-cni-bin\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.688981 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-log-socket\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689001 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-cni-netd\") pod \"ovnkube-node-q24zl\" (UID: 
\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689020 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77fgj\" (UniqueName: \"kubernetes.io/projected/a045c900-bcf1-42b6-894a-10c38739cf92-kube-api-access-77fgj\") pod \"multus-additional-cni-plugins-x9nkb\" (UID: \"a045c900-bcf1-42b6-894a-10c38739cf92\") " pod="openshift-multus/multus-additional-cni-plugins-x9nkb" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689040 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a045c900-bcf1-42b6-894a-10c38739cf92-system-cni-dir\") pod \"multus-additional-cni-plugins-x9nkb\" (UID: \"a045c900-bcf1-42b6-894a-10c38739cf92\") " pod="openshift-multus/multus-additional-cni-plugins-x9nkb" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689060 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-multus-socket-dir-parent\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689079 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-host-var-lib-cni-multus\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689099 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd-rootfs\") pod \"machine-config-daemon-m84kp\" (UID: \"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\") " pod="openshift-machine-config-operator/machine-config-daemon-m84kp" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689127 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f9a98683-f9ac-45d4-9312-43ebf25bdb52-ovnkube-script-lib\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689148 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd-mcd-auth-proxy-config\") pod \"machine-config-daemon-m84kp\" (UID: \"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\") " pod="openshift-machine-config-operator/machine-config-daemon-m84kp" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689173 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/a045c900-bcf1-42b6-894a-10c38739cf92-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-x9nkb\" (UID: \"a045c900-bcf1-42b6-894a-10c38739cf92\") " pod="openshift-multus/multus-additional-cni-plugins-x9nkb" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689195 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: 
\"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-kubelet\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689217 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-systemd-units\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689237 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-etc-openvswitch\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689260 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689308 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/9a2a5d08-c449-45c6-8e1f-340c076422db-multus-daemon-config\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689331 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-var-lib-openvswitch\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689351 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-run-openvswitch\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689376 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a045c900-bcf1-42b6-894a-10c38739cf92-tuning-conf-dir\") pod \"multus-additional-cni-plugins-x9nkb\" (UID: \"a045c900-bcf1-42b6-894a-10c38739cf92\") " pod="openshift-multus/multus-additional-cni-plugins-x9nkb" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689421 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-cnibin\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689445 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/a045c900-bcf1-42b6-894a-10c38739cf92-cni-binary-copy\") pod \"multus-additional-cni-plugins-x9nkb\" (UID: \"a045c900-bcf1-42b6-894a-10c38739cf92\") " pod="openshift-multus/multus-additional-cni-plugins-x9nkb" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689468 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-host-run-k8s-cni-cncf-io\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689490 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kk74\" (UniqueName: \"kubernetes.io/projected/9a2a5d08-c449-45c6-8e1f-340c076422db-kube-api-access-6kk74\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689512 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a045c900-bcf1-42b6-894a-10c38739cf92-cnibin\") pod \"multus-additional-cni-plugins-x9nkb\" (UID: \"a045c900-bcf1-42b6-894a-10c38739cf92\") " pod="openshift-multus/multus-additional-cni-plugins-x9nkb" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689521 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-multus-conf-dir\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689533 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/9a2a5d08-c449-45c6-8e1f-340c076422db-cni-binary-copy\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689586 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-run-netns\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689604 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f9a98683-f9ac-45d4-9312-43ebf25bdb52-ovn-node-metrics-cert\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689646 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-host-var-lib-kubelet\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689686 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-cni-bin\") pod \"ovnkube-node-q24zl\" 
(UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689706 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689731 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-multus-cni-dir\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689795 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-slash\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689830 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-host-run-netns\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689851 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-hostroot\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689882 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-multus-cni-dir\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689905 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-run-systemd\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689913 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-run-ovn-kubernetes\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689887 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-run-ovn-kubernetes\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 
10:43:25.690009 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f9a98683-f9ac-45d4-9312-43ebf25bdb52-ovnkube-config\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.690108 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/9a2a5d08-c449-45c6-8e1f-340c076422db-cni-binary-copy\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.690126 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd-proxy-tls\") pod \"machine-config-daemon-m84kp\" (UID: \"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\") " pod="openshift-machine-config-operator/machine-config-daemon-m84kp" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.690154 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-os-release\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.690227 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a045c900-bcf1-42b6-894a-10c38739cf92-os-release\") pod \"multus-additional-cni-plugins-x9nkb\" (UID: \"a045c900-bcf1-42b6-894a-10c38739cf92\") " pod="openshift-multus/multus-additional-cni-plugins-x9nkb" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.690393 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-node-log\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.690436 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a045c900-bcf1-42b6-894a-10c38739cf92-os-release\") pod \"multus-additional-cni-plugins-x9nkb\" (UID: \"a045c900-bcf1-42b6-894a-10c38739cf92\") " pod="openshift-multus/multus-additional-cni-plugins-x9nkb" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.690438 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-host-run-multus-certs\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.690475 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-os-release\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.690500 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: 
\"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-multus-socket-dir-parent\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.690486 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-kubelet\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.690555 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-run-ovn\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.690564 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-var-lib-openvswitch\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.690534 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/9a2a5d08-c449-45c6-8e1f-340c076422db-multus-daemon-config\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.690515 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-host-var-lib-cni-multus\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.690597 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-systemd-units\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.690605 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-system-cni-dir\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.690613 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/a045c900-bcf1-42b6-894a-10c38739cf92-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-x9nkb\" (UID: \"a045c900-bcf1-42b6-894a-10c38739cf92\") " pod="openshift-multus/multus-additional-cni-plugins-x9nkb" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.690626 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-run-openvswitch\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.690528 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd-rootfs\") pod \"machine-config-daemon-m84kp\" (UID: \"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\") " pod="openshift-machine-config-operator/machine-config-daemon-m84kp" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.690668 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-etc-openvswitch\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.690747 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-hostroot\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.690898 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-host-var-lib-cni-bin\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.690908 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f9a98683-f9ac-45d4-9312-43ebf25bdb52-env-overrides\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.690928 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-log-socket\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.690951 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-run-netns\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.689484 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a045c900-bcf1-42b6-894a-10c38739cf92-system-cni-dir\") pod \"multus-additional-cni-plugins-x9nkb\" (UID: \"a045c900-bcf1-42b6-894a-10c38739cf92\") " pod="openshift-multus/multus-additional-cni-plugins-x9nkb" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.690973 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-cni-netd\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.691003 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-host-run-netns\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.691004 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a045c900-bcf1-42b6-894a-10c38739cf92-cnibin\") pod \"multus-additional-cni-plugins-x9nkb\" (UID: \"a045c900-bcf1-42b6-894a-10c38739cf92\") " pod="openshift-multus/multus-additional-cni-plugins-x9nkb" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.691160 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-cnibin\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.691166 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f9a98683-f9ac-45d4-9312-43ebf25bdb52-ovnkube-script-lib\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.691272 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-host-run-k8s-cni-cncf-io\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.691387 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-cni-bin\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.691464 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-slash\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.691572 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-host-var-lib-kubelet\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.691663 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9a2a5d08-c449-45c6-8e1f-340c076422db-etc-kubernetes\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.691712 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a045c900-bcf1-42b6-894a-10c38739cf92-tuning-conf-dir\") pod \"multus-additional-cni-plugins-x9nkb\" (UID: \"a045c900-bcf1-42b6-894a-10c38739cf92\") " 
pod="openshift-multus/multus-additional-cni-plugins-x9nkb" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.691692 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f9a98683-f9ac-45d4-9312-43ebf25bdb52-ovnkube-config\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.691794 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd-mcd-auth-proxy-config\") pod \"machine-config-daemon-m84kp\" (UID: \"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\") " pod="openshift-machine-config-operator/machine-config-daemon-m84kp" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.691859 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a045c900-bcf1-42b6-894a-10c38739cf92-cni-binary-copy\") pod \"multus-additional-cni-plugins-x9nkb\" (UID: \"a045c900-bcf1-42b6-894a-10c38739cf92\") " pod="openshift-multus/multus-additional-cni-plugins-x9nkb" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.694616 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd-proxy-tls\") pod \"machine-config-daemon-m84kp\" (UID: \"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\") " pod="openshift-machine-config-operator/machine-config-daemon-m84kp" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.694858 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f9a98683-f9ac-45d4-9312-43ebf25bdb52-ovn-node-metrics-cert\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.697257 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.707073 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kk74\" (UniqueName: \"kubernetes.io/projected/9a2a5d08-c449-45c6-8e1f-340c076422db-kube-api-access-6kk74\") pod \"multus-vpb6l\" (UID: \"9a2a5d08-c449-45c6-8e1f-340c076422db\") " pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.708194 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired 
or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.708705 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77fgj\" (UniqueName: \"kubernetes.io/projected/a045c900-bcf1-42b6-894a-10c38739cf92-kube-api-access-77fgj\") pod \"multus-additional-cni-plugins-x9nkb\" (UID: \"a045c900-bcf1-42b6-894a-10c38739cf92\") " pod="openshift-multus/multus-additional-cni-plugins-x9nkb" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.714496 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gmzb\" (UniqueName: \"kubernetes.io/projected/c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd-kube-api-access-5gmzb\") pod \"machine-config-daemon-m84kp\" (UID: \"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\") " pod="openshift-machine-config-operator/machine-config-daemon-m84kp" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.716973 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xp458\" (UniqueName: \"kubernetes.io/projected/f9a98683-f9ac-45d4-9312-43ebf25bdb52-kube-api-access-xp458\") pod \"ovnkube-node-q24zl\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.718552 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.731309 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.742733 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.762224 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"r
estartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\
\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.779390 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.790134 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.790665 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.790774 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:43:25 crc kubenswrapper[5003]: E0126 10:43:25.790801 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:43:27.79078196 +0000 UTC m=+23.332007521 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.790831 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:43:25 crc kubenswrapper[5003]: E0126 10:43:25.790928 5003 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 10:43:25 crc kubenswrapper[5003]: E0126 10:43:25.790988 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 10:43:27.790974666 +0000 UTC m=+23.332200237 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 10:43:25 crc kubenswrapper[5003]: E0126 10:43:25.791122 5003 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 10:43:25 crc kubenswrapper[5003]: E0126 10:43:25.791242 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 10:43:27.791226223 +0000 UTC m=+23.332451784 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.805336 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.822316 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.842136 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.848001 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.849004 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" Jan 26 10:43:25 crc kubenswrapper[5003]: W0126 10:43:25.855960 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc9e56ffa_1020_4f9f_b2f4_cd11ed3850bd.slice/crio-0876cfbb4bbe1b63d4e2f2715b9a2c7652c7a25a597d47efddaff762c8462392 WatchSource:0}: Error finding container 0876cfbb4bbe1b63d4e2f2715b9a2c7652c7a25a597d47efddaff762c8462392: Status 404 returned error can't find the container with id 0876cfbb4bbe1b63d4e2f2715b9a2c7652c7a25a597d47efddaff762c8462392 Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.858567 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-vpb6l" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.861840 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\
\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z"
Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.870366 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl"
Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.873442 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted.
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: W0126 10:43:25.873743 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9a2a5d08_c449_45c6_8e1f_340c076422db.slice/crio-3acfb60da4543f944d04e227bf45578327f76e5ff15d04b172faa23ce4ecfcf5 WatchSource:0}: Error finding container 3acfb60da4543f944d04e227bf45578327f76e5ff15d04b172faa23ce4ecfcf5: Status 404 returned error can't find the container with id 3acfb60da4543f944d04e227bf45578327f76e5ff15d04b172faa23ce4ecfcf5 Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.886873 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.892218 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:43:25 crc 
kubenswrapper[5003]: I0126 10:43:25.892301 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 26 10:43:25 crc kubenswrapper[5003]: E0126 10:43:25.892432 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 26 10:43:25 crc kubenswrapper[5003]: E0126 10:43:25.892449 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 26 10:43:25 crc kubenswrapper[5003]: E0126 10:43:25.892462 5003 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 26 10:43:25 crc kubenswrapper[5003]: E0126 10:43:25.892468 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 26 10:43:25 crc kubenswrapper[5003]: E0126 10:43:25.892495 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 26 10:43:25 crc kubenswrapper[5003]: E0126 10:43:25.892509 5003 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 26 10:43:25 crc kubenswrapper[5003]: E0126 10:43:25.892510 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-26 10:43:27.892492665 +0000 UTC m=+23.433718226 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 26 10:43:25 crc kubenswrapper[5003]: E0126 10:43:25.892585 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-26 10:43:27.892565767 +0000 UTC m=+23.433791408 (durationBeforeRetry 2s).
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.898786 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287fa
af92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:25 crc kubenswrapper[5003]: I0126 10:43:25.959172 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 00:02:44.534266754 +0000 UTC Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.000661 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.000761 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:43:26 crc kubenswrapper[5003]: E0126 10:43:26.000832 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:43:26 crc kubenswrapper[5003]: E0126 10:43:26.000879 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.000925 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:43:26 crc kubenswrapper[5003]: E0126 10:43:26.001034 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.110061 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-x7zz5" event={"ID":"6bd79f7d-ecd0-490f-af95-e995c1f3c052","Type":"ContainerStarted","Data":"214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4"} Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.110507 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-x7zz5" event={"ID":"6bd79f7d-ecd0-490f-af95-e995c1f3c052","Type":"ContainerStarted","Data":"5587b52a1fceb974c15ff8d635984a453124e45131a6fc4f7476c65eaed5a055"} Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.113329 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" event={"ID":"f9a98683-f9ac-45d4-9312-43ebf25bdb52","Type":"ContainerStarted","Data":"be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff"} Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.113364 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" event={"ID":"f9a98683-f9ac-45d4-9312-43ebf25bdb52","Type":"ContainerStarted","Data":"0f88ecab4e09e11a3e128239077bcbcaa6341f139f9e872a006c907d8c619d28"} Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.115222 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vpb6l" event={"ID":"9a2a5d08-c449-45c6-8e1f-340c076422db","Type":"ContainerStarted","Data":"432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4"} Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.115251 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vpb6l" event={"ID":"9a2a5d08-c449-45c6-8e1f-340c076422db","Type":"ContainerStarted","Data":"3acfb60da4543f944d04e227bf45578327f76e5ff15d04b172faa23ce4ecfcf5"} Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.117407 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" event={"ID":"a045c900-bcf1-42b6-894a-10c38739cf92","Type":"ContainerStarted","Data":"b8a7f40bdb054f9817d821a296c7e121ccc5c88f6edd490a9a3cf805ecd2dd4e"} Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.120467 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" event={"ID":"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd","Type":"ContainerStarted","Data":"847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a"} Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.120527 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" 
event={"ID":"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd","Type":"ContainerStarted","Data":"0876cfbb4bbe1b63d4e2f2715b9a2c7652c7a25a597d47efddaff762c8462392"} Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.124544 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\"
:[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:26Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.136632 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:26Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.148136 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMount
s\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:26Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.168487 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"reso
urce-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\
"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:26Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.179551 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:26Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.193872 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:26Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.215791 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:26Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.240328 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:26Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.257329 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:26Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.269993 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:26Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.311583 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastStat
e\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d74
2fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:26Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.334000 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:26Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.359477 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:26Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.377376 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"20
26-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"pod
IPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:26Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.390805 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:26Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.405675 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:26Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.422724 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:26Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.437814 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroo
t\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:26Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.474564 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:26Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.492170 5003 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\
\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:26Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.506749 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:26Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.524078 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:26Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.539656 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:26Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.551637 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:26Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.564512 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:26Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.574894 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:26Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.587234 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:26Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.604692 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"r
estartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\
\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:26Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:26 crc kubenswrapper[5003]: I0126 10:43:26.960073 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 23:12:24.136977894 +0000 UTC Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.126106 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" event={"ID":"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd","Type":"ContainerStarted","Data":"25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9"} Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.127817 5003 generic.go:334] "Generic (PLEG): container finished" podID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" 
containerID="be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff" exitCode=0 Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.127884 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" event={"ID":"f9a98683-f9ac-45d4-9312-43ebf25bdb52","Type":"ContainerDied","Data":"be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff"} Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.130133 5003 generic.go:334] "Generic (PLEG): container finished" podID="a045c900-bcf1-42b6-894a-10c38739cf92" containerID="9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109" exitCode=0 Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.130212 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" event={"ID":"a045c900-bcf1-42b6-894a-10c38739cf92","Type":"ContainerDied","Data":"9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109"} Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.138371 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-26T10:43:27Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.150160 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:27Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.176814 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfb
b085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:27Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.193515 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.i
o\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:27Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.217477 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:27Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.234579 5003 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\
\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:27Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.246399 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:27Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.257640 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:27Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.269823 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMount
s\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:27Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.289303 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"reso
urce-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\
"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:27Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.302157 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:27Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.313775 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:27Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.325393 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:27Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.338875 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:27Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.353024 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"n
ame\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:27Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.364448 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf9
2edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:27Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.378032 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be 
located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:27Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.396640 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-
26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:27Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.421063 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-r
esources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"C
ompleted\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:27Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.435812 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:27Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.447583 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:27Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.459076 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:27Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.471776 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:27Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.481064 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:27Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.498947 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:27Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.516367 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:27Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc 
kubenswrapper[5003]: I0126 10:43:27.538651 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\
"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:27Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.565337 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:27Z 
is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.785154 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.786892 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.786920 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.786928 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.787025 5003 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.794790 5003 kubelet_node_status.go:115] "Node was previously registered" node="crc" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.795004 5003 kubelet_node_status.go:79] "Successfully registered node" node="crc" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.795941 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.795962 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.795969 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.795981 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.795989 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:27Z","lastTransitionTime":"2026-01-26T10:43:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.822633 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.822752 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.822802 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:43:27 crc kubenswrapper[5003]: E0126 10:43:27.822903 5003 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 10:43:27 crc kubenswrapper[5003]: E0126 10:43:27.822985 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 10:43:31.822967993 +0000 UTC m=+27.364193554 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 10:43:27 crc kubenswrapper[5003]: E0126 10:43:27.823051 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:43:31.823041295 +0000 UTC m=+27.364266866 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:43:27 crc kubenswrapper[5003]: E0126 10:43:27.823122 5003 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 10:43:27 crc kubenswrapper[5003]: E0126 10:43:27.823156 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 10:43:31.823146958 +0000 UTC m=+27.364372519 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 10:43:27 crc kubenswrapper[5003]: E0126 10:43:27.826137 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:27Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.833744 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.833783 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.833794 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.833812 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.833824 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:27Z","lastTransitionTime":"2026-01-26T10:43:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:27 crc kubenswrapper[5003]: E0126 10:43:27.853607 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:27Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.858979 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.859021 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.859033 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.859048 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.859060 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:27Z","lastTransitionTime":"2026-01-26T10:43:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:27 crc kubenswrapper[5003]: E0126 10:43:27.879999 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:27Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.885329 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.885636 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.885650 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.885667 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.885678 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:27Z","lastTransitionTime":"2026-01-26T10:43:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:27 crc kubenswrapper[5003]: E0126 10:43:27.904703 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:27Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.907512 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.907549 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.907561 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.907576 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.907587 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:27Z","lastTransitionTime":"2026-01-26T10:43:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.923548 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.923588 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:43:27 crc kubenswrapper[5003]: E0126 10:43:27.923693 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 10:43:27 crc kubenswrapper[5003]: E0126 10:43:27.923707 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 10:43:27 crc kubenswrapper[5003]: E0126 10:43:27.923719 5003 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 10:43:27 crc kubenswrapper[5003]: E0126 10:43:27.923742 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 10:43:27 crc kubenswrapper[5003]: E0126 10:43:27.923767 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-26 10:43:31.923754182 +0000 UTC m=+27.464979743 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 10:43:27 crc kubenswrapper[5003]: E0126 10:43:27.923768 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 10:43:27 crc kubenswrapper[5003]: E0126 10:43:27.923784 5003 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 10:43:27 crc kubenswrapper[5003]: E0126 10:43:27.923825 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-26 10:43:31.923810823 +0000 UTC m=+27.465036384 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 10:43:27 crc kubenswrapper[5003]: E0126 10:43:27.924003 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin 
returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2
daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:27Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:27 crc kubenswrapper[5003]: E0126 10:43:27.924307 5003 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.926590 5003 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.926628 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.926641 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.926658 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.926670 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:27Z","lastTransitionTime":"2026-01-26T10:43:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:27 crc kubenswrapper[5003]: I0126 10:43:27.960607 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 13:28:56.616596199 +0000 UTC Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.001262 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.001431 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:43:28 crc kubenswrapper[5003]: E0126 10:43:28.001509 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.001535 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:43:28 crc kubenswrapper[5003]: E0126 10:43:28.001677 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:43:28 crc kubenswrapper[5003]: E0126 10:43:28.001804 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.029145 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.029189 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.029201 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.029216 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.029226 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:28Z","lastTransitionTime":"2026-01-26T10:43:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.131707 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.131742 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.131750 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.131790 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.131800 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:28Z","lastTransitionTime":"2026-01-26T10:43:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.133762 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"eef6c99048be2d0cfc98ef20b8559ceaf44491ffebbc5a8f363117a1c7a0b3c4"} Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.136835 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" event={"ID":"f9a98683-f9ac-45d4-9312-43ebf25bdb52","Type":"ContainerStarted","Data":"c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19"} Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.136880 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" event={"ID":"f9a98683-f9ac-45d4-9312-43ebf25bdb52","Type":"ContainerStarted","Data":"7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34"} Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.136891 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" event={"ID":"f9a98683-f9ac-45d4-9312-43ebf25bdb52","Type":"ContainerStarted","Data":"f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2"} Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.136901 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" event={"ID":"f9a98683-f9ac-45d4-9312-43ebf25bdb52","Type":"ContainerStarted","Data":"b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68"} Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.136910 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" event={"ID":"f9a98683-f9ac-45d4-9312-43ebf25bdb52","Type":"ContainerStarted","Data":"03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451"} Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.136921 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" event={"ID":"f9a98683-f9ac-45d4-9312-43ebf25bdb52","Type":"ContainerStarted","Data":"33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f"} Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.138530 5003 generic.go:334] "Generic (PLEG): container finished" podID="a045c900-bcf1-42b6-894a-10c38739cf92" containerID="f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb" exitCode=0 Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.138599 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" event={"ID":"a045c900-bcf1-42b6-894a-10c38739cf92","Type":"ContainerDied","Data":"f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb"} Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.144008 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-xfz4f"] Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.144322 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-xfz4f" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.145947 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.146058 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.146376 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.146667 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.149334 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:28Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.160423 5003 
status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-26T10:43:28Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.173922 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/
var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readO
nly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:28Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.186795 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\
"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:28Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.210160 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:28Z 
is after 2025-08-24T17:21:41Z" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.223808 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernet
es/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:28Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.225651 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j74r8\" (UniqueName: \"kubernetes.io/projected/5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05-kube-api-access-j74r8\") pod \"node-ca-xfz4f\" (UID: \"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\") " pod="openshift-image-registry/node-ca-xfz4f" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 
10:43:28.225700 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05-host\") pod \"node-ca-xfz4f\" (UID: \"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\") " pod="openshift-image-registry/node-ca-xfz4f" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.225768 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05-serviceca\") pod \"node-ca-xfz4f\" (UID: \"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\") " pod="openshift-image-registry/node-ca-xfz4f" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.235561 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.235604 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.235620 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.235640 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.235656 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:28Z","lastTransitionTime":"2026-01-26T10:43:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.240345 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:28Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.252854 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef6c99048be2d0cfc98ef20b8559ceaf44491ffebbc5a8f363117a1c7a0b3c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:28Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.266526 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:28Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.286109 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"start
edAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e
626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:28Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.298602 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:28Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.312774 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:28Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.325170 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:28Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.326502 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j74r8\" (UniqueName: \"kubernetes.io/projected/5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05-kube-api-access-j74r8\") pod \"node-ca-xfz4f\" (UID: \"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\") " pod="openshift-image-registry/node-ca-xfz4f" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.326539 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05-host\") pod \"node-ca-xfz4f\" (UID: \"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\") " pod="openshift-image-registry/node-ca-xfz4f" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.326572 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05-serviceca\") pod \"node-ca-xfz4f\" (UID: \"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\") " pod="openshift-image-registry/node-ca-xfz4f" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.326721 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05-host\") pod \"node-ca-xfz4f\" (UID: \"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\") " pod="openshift-image-registry/node-ca-xfz4f" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.327548 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05-serviceca\") pod \"node-ca-xfz4f\" (UID: \"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\") " pod="openshift-image-registry/node-ca-xfz4f" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.338091 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.338123 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.338133 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.338146 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.338154 5003 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:28Z","lastTransitionTime":"2026-01-26T10:43:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.340821 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:28Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.343776 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j74r8\" (UniqueName: \"kubernetes.io/projected/5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05-kube-api-access-j74r8\") pod \"node-ca-xfz4f\" (UID: \"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\") " pod="openshift-image-registry/node-ca-xfz4f" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.355190 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/
openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:28Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.370616 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\
\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:28Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.382039 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef6c99048be2d0cfc98ef20b8559ceaf44491ffebbc5a8f363117a1c7a0b3c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:28Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.412702 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:28Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.440458 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.440505 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.440516 5003 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.440532 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.440544 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:28Z","lastTransitionTime":"2026-01-26T10:43:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.455955 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-xfz4f" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.462234 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubern
etes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\
"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:28Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:28 crc kubenswrapper[5003]: W0126 10:43:28.473526 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5c23d8cc_d7a8_4d19_b9cc_a54ed2bf0b05.slice/crio-74792923cc460b80cbd8fce39e49261630a18dadf01e51350f6262424d30a592 WatchSource:0}: Error finding container 74792923cc460b80cbd8fce39e49261630a18dadf01e51350f6262424d30a592: Status 404 returned error can't find the container with id 74792923cc460b80cbd8fce39e49261630a18dadf01e51350f6262424d30a592 Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.494348 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:28Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.532902 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:28Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.545200 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.545238 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.545253 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.545270 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.545300 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:28Z","lastTransitionTime":"2026-01-26T10:43:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.570901 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:28Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.612479 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:28Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.647681 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.647727 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.647781 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.647801 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.647813 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:28Z","lastTransitionTime":"2026-01-26T10:43:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.649051 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xfz4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j74r8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xfz4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:28Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.699516 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:28Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.730230 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\
" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:28Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.750946 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.750982 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.750992 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.751009 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.751018 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:28Z","lastTransitionTime":"2026-01-26T10:43:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.770701 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secret
s/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:28Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.813585 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-
26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:28Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.850697 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\
"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:28Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.852937 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.852978 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.852988 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.853004 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.853014 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:28Z","lastTransitionTime":"2026-01-26T10:43:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.955522 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.955556 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.955564 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.955579 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.955591 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:28Z","lastTransitionTime":"2026-01-26T10:43:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:28 crc kubenswrapper[5003]: I0126 10:43:28.961745 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 02:12:27.254189632 +0000 UTC Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.058329 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.058371 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.058379 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.058417 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.058428 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:29Z","lastTransitionTime":"2026-01-26T10:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.143908 5003 generic.go:334] "Generic (PLEG): container finished" podID="a045c900-bcf1-42b6-894a-10c38739cf92" containerID="c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b" exitCode=0 Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.143995 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" event={"ID":"a045c900-bcf1-42b6-894a-10c38739cf92","Type":"ContainerDied","Data":"c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b"} Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.146618 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-xfz4f" event={"ID":"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05","Type":"ContainerStarted","Data":"81a673ac45c8bce5e09aa400e29966d2fa127d94716fbcbb4601193bd912e66d"} Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.146646 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-xfz4f" event={"ID":"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05","Type":"ContainerStarted","Data":"74792923cc460b80cbd8fce39e49261630a18dadf01e51350f6262424d30a592"} Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.160160 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.160339 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.160417 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.160483 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.160545 5003 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:29Z","lastTransitionTime":"2026-01-26T10:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.160773 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:29Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.172367 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:29Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.183410 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:29Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.194066 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:29Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.203472 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xfz4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j74r8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xfz4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:29Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.221674 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"re
ady\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\
\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:29Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.231509 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:29Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.243049 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:29Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.259924 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\
\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:29Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.262505 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.262541 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.262550 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.262564 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.262575 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:29Z","lastTransitionTime":"2026-01-26T10:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.272723 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:29Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.297492 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:29Z 
is after 2025-08-24T17:21:41Z" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.331331 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernet
es/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:29Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.365491 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.365590 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.365609 5003 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.365673 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.365688 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:29Z","lastTransitionTime":"2026-01-26T10:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.372147 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef6c99048be2d0cfc98ef20b8559ceaf44491ffebbc5a8f363117a1c7a0b3c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:29Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.412006 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:29Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.451129 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:29Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.469664 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.469734 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.469763 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.469788 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.469805 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:29Z","lastTransitionTime":"2026-01-26T10:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.495026 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:29Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.533827 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:29Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.572986 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:29Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.573825 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.573931 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.573949 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.573967 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.573977 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:29Z","lastTransitionTime":"2026-01-26T10:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.614235 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef6c99048be2d0cfc98ef20b8559ceaf44491ffebbc5a8f363117a1c7a0b3c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:29Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.654580 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:29Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.678132 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.678190 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.678201 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.678219 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.678233 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:29Z","lastTransitionTime":"2026-01-26T10:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.691691 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:29Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.731929 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:29Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.772255 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xfz4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81a673ac45c8bce5e09aa400e29966d2fa127d94716fbcbb4601193bd912e66d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-j74r8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xfz4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:29Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.781441 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.781502 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.781517 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.781693 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.781782 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:29Z","lastTransitionTime":"2026-01-26T10:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.823025 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:29Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.853034 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:29Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.884229 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.884260 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.884268 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.884300 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.884309 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:29Z","lastTransitionTime":"2026-01-26T10:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.891169 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:29Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.930691 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:29Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.962628 5003 certificate_manager.go:356] 
kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 16:41:21.110860882 +0000 UTC Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.977372 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true
,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/o
s-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:29Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.986442 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.986475 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.986484 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.986499 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:29 crc kubenswrapper[5003]: I0126 10:43:29.986507 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:29Z","lastTransitionTime":"2026-01-26T10:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.008815 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.008940 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:43:30 crc kubenswrapper[5003]: E0126 10:43:30.008990 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.008838 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:43:30 crc kubenswrapper[5003]: E0126 10:43:30.009148 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:43:30 crc kubenswrapper[5003]: E0126 10:43:30.009254 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.027438 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\
\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:30Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.058217 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:30Z 
is after 2025-08-24T17:21:41Z" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.088596 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.088648 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.088661 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.088688 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.088705 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:30Z","lastTransitionTime":"2026-01-26T10:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.152606 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" event={"ID":"a045c900-bcf1-42b6-894a-10c38739cf92","Type":"ContainerDied","Data":"cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083"} Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.152519 5003 generic.go:334] "Generic (PLEG): container finished" podID="a045c900-bcf1-42b6-894a-10c38739cf92" containerID="cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083" exitCode=0 Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.159202 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" event={"ID":"f9a98683-f9ac-45d4-9312-43ebf25bdb52","Type":"ContainerStarted","Data":"1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec"} Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.168271 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:30Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.179720 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef6c99048be2d0cfc98ef20b8559ceaf44491ffebbc5a8f363117a1c7a0b3c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:30Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.191646 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:30Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.192087 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.192109 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.192118 5003 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.192136 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.192150 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:30Z","lastTransitionTime":"2026-01-26T10:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.211582 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\
"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:30Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.252012 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:30Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.296496 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:30Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.296812 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.296891 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.296907 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.296936 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.296956 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:30Z","lastTransitionTime":"2026-01-26T10:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.332963 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:30Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.371134 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:30Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.399476 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.399681 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.399771 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.399841 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.399895 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:30Z","lastTransitionTime":"2026-01-26T10:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.411337 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xfz4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81a673ac45c8bce5e09aa400e29966d2fa127d94716fbcbb4601193bd912e66d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j74r8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xfz4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:30Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.456907 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021
ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:30Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.489579 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\
\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:30Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.502137 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.502194 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.502210 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.502232 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.502249 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:30Z","lastTransitionTime":"2026-01-26T10:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.503016 5003 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.552508 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:30Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.595738 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ee193
9de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/
entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:30Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.604662 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.604710 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.604721 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.604738 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.604749 5003 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:30Z","lastTransitionTime":"2026-01-26T10:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.632856 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube
rnetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:30Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.676266 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:30Z 
is after 2025-08-24T17:21:41Z" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.707035 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.707084 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.707097 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.707120 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.707132 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:30Z","lastTransitionTime":"2026-01-26T10:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.809221 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.809516 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.809525 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.809539 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.809549 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:30Z","lastTransitionTime":"2026-01-26T10:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.911736 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.911782 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.911794 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.911811 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.911822 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:30Z","lastTransitionTime":"2026-01-26T10:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:30 crc kubenswrapper[5003]: I0126 10:43:30.963465 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 04:47:39.667864586 +0000 UTC Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.013893 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.013946 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.013968 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.013986 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.014001 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:31Z","lastTransitionTime":"2026-01-26T10:43:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.116867 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.116916 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.116926 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.116939 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.116948 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:31Z","lastTransitionTime":"2026-01-26T10:43:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.165686 5003 generic.go:334] "Generic (PLEG): container finished" podID="a045c900-bcf1-42b6-894a-10c38739cf92" containerID="203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c" exitCode=0 Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.165741 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" event={"ID":"a045c900-bcf1-42b6-894a-10c38739cf92","Type":"ContainerDied","Data":"203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c"} Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.179185 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc27
6e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:31Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.193089 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef6c99048be2d0cfc98ef20b8559ceaf44491ffebbc5a8f363117a1c7a0b3c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:31Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.206370 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:31Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.219247 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:31Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.219526 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.219557 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.219568 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.219583 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.219593 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:31Z","lastTransitionTime":"2026-01-26T10:43:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.231068 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:31Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.243598 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:31Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.254873 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:31Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.266434 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:31Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.276556 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xfz4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81a673ac45c8bce5e09aa400e29966d2fa127d94716fbcbb4601193bd912e66d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j74r8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xfz4f\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:31Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.294578 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:31Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.305665 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:31Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.315302 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:31Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.321108 5003 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.321139 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.321151 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.321167 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.321180 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:31Z","lastTransitionTime":"2026-01-26T10:43:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.328917 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:31Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.340519 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:31Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.357698 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:31Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.423907 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.423943 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.423954 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.423970 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.423981 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:31Z","lastTransitionTime":"2026-01-26T10:43:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.526531 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.526569 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.526580 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.526600 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.526614 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:31Z","lastTransitionTime":"2026-01-26T10:43:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.629173 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.629220 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.629233 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.629249 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.629277 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:31Z","lastTransitionTime":"2026-01-26T10:43:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.731952 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.732006 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.732019 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.732038 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.732053 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:31Z","lastTransitionTime":"2026-01-26T10:43:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.834134 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.834176 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.834187 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.834204 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.834216 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:31Z","lastTransitionTime":"2026-01-26T10:43:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.869252 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:43:31 crc kubenswrapper[5003]: E0126 10:43:31.869454 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:43:39.86942248 +0000 UTC m=+35.410648041 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.869562 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.869620 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:43:31 crc kubenswrapper[5003]: E0126 10:43:31.869713 5003 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 10:43:31 crc kubenswrapper[5003]: E0126 10:43:31.869760 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 10:43:39.869751569 +0000 UTC m=+35.410977200 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 10:43:31 crc kubenswrapper[5003]: E0126 10:43:31.869795 5003 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 10:43:31 crc kubenswrapper[5003]: E0126 10:43:31.869907 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 10:43:39.869876712 +0000 UTC m=+35.411102303 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.935835 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.935872 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.935887 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.935903 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.935914 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:31Z","lastTransitionTime":"2026-01-26T10:43:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.964072 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 16:17:49.617831989 +0000 UTC Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.970663 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:43:31 crc kubenswrapper[5003]: I0126 10:43:31.970721 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:43:31 crc kubenswrapper[5003]: E0126 10:43:31.970805 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 10:43:31 crc kubenswrapper[5003]: E0126 10:43:31.970806 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 10:43:31 crc kubenswrapper[5003]: E0126 10:43:31.970840 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 10:43:31 crc kubenswrapper[5003]: E0126 10:43:31.970854 5003 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod 
openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 10:43:31 crc kubenswrapper[5003]: E0126 10:43:31.970904 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-26 10:43:39.970888708 +0000 UTC m=+35.512114269 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 10:43:31 crc kubenswrapper[5003]: E0126 10:43:31.970818 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 10:43:31 crc kubenswrapper[5003]: E0126 10:43:31.970928 5003 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 10:43:31 crc kubenswrapper[5003]: E0126 10:43:31.970967 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-26 10:43:39.970949839 +0000 UTC m=+35.512175410 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.000872 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.000889 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:43:32 crc kubenswrapper[5003]: E0126 10:43:32.000996 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:43:32 crc kubenswrapper[5003]: E0126 10:43:32.001059 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.000902 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:43:32 crc kubenswrapper[5003]: E0126 10:43:32.001145 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.037607 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.037646 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.037653 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.037666 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.037675 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:32Z","lastTransitionTime":"2026-01-26T10:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.140012 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.140044 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.140053 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.140066 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.140075 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:32Z","lastTransitionTime":"2026-01-26T10:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.182432 5003 generic.go:334] "Generic (PLEG): container finished" podID="a045c900-bcf1-42b6-894a-10c38739cf92" containerID="eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b" exitCode=0 Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.182477 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" event={"ID":"a045c900-bcf1-42b6-894a-10c38739cf92","Type":"ContainerDied","Data":"eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b"} Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.196995 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:32Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.211569 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a168
8df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"
/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:32Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.226739 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:32Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.242783 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.242818 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.242840 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.242855 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.242864 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:32Z","lastTransitionTime":"2026-01-26T10:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.247170 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:32Z 
is after 2025-08-24T17:21:41Z" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.263553 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:32Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.279135 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:32Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.296634 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:32Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.312895 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef6c99048be2d0cfc98ef20b8559ceaf44491ffebbc5a8f363117a1c7a0b3c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-26T10:43:32Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.327314 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:32Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.341185 5003 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:32Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.345385 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.345415 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.345424 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.345437 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.345448 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:32Z","lastTransitionTime":"2026-01-26T10:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.355877 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:32Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.366208 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xfz4f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81a673ac45c8bce5e09aa400e29966d2fa127d94716fbcbb4601193bd912e66d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j74r8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xfz4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:32Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.387330 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021
ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:32Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.406729 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:32Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.418006 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:32Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.447973 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.448006 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.448015 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.448045 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.448058 5003 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:32Z","lastTransitionTime":"2026-01-26T10:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.552776 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.552804 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.552812 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.552824 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.552834 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:32Z","lastTransitionTime":"2026-01-26T10:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.655224 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.655258 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.655270 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.655316 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.655333 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:32Z","lastTransitionTime":"2026-01-26T10:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.758131 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.758213 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.758227 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.758326 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.758351 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:32Z","lastTransitionTime":"2026-01-26T10:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.860080 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.860125 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.860135 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.860153 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.860165 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:32Z","lastTransitionTime":"2026-01-26T10:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.962736 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.962778 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.962795 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.962813 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.962828 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:32Z","lastTransitionTime":"2026-01-26T10:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:32 crc kubenswrapper[5003]: I0126 10:43:32.964469 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 19:48:32.172560631 +0000 UTC Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.065359 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.066412 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.066512 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.066633 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.066726 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:33Z","lastTransitionTime":"2026-01-26T10:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.169240 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.169550 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.169660 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.169781 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.169892 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:33Z","lastTransitionTime":"2026-01-26T10:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.190944 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" event={"ID":"a045c900-bcf1-42b6-894a-10c38739cf92","Type":"ContainerStarted","Data":"f74daac1b5987191a8aec42f28dc59180aaa2033b31d0c4341b0938e473fe2f9"} Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.196327 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" event={"ID":"f9a98683-f9ac-45d4-9312-43ebf25bdb52","Type":"ContainerStarted","Data":"889aad833a39b6c921ed34562ee39466eeb30b5f7a0886c1f4dd7c83993b5b93"} Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.196634 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.196843 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.203771 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef6c99048be2d0cfc98ef20b8559ceaf44491ffebbc5a8f363117a1c7a0b3c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:33Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.216480 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:33Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.232614 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:33Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.246999 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:33Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.256106 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.256728 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.260783 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:33Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.271542 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:33Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.272421 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.272452 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.272461 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.272474 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.272483 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:33Z","lastTransitionTime":"2026-01-26T10:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.282620 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:33Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.293053 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xfz4f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81a673ac45c8bce5e09aa400e29966d2fa127d94716fbcbb4601193bd912e66d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j74r8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xfz4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:33Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.312182 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021
ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:33Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.321889 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\
\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:33Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.331163 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gm
zb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:33Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.343759 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f74daac1b5987191a8aec42f28dc59180aaa2033b31d0c4341b0938e473fe2f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T
10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f
2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]
}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:33Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.354175 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"
Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:33Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.375264 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:33Z 
is after 2025-08-24T17:21:41Z" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.375466 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.375490 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.375500 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.375515 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.375525 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:33Z","lastTransitionTime":"2026-01-26T10:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.388587 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc47827
4c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\
\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:33Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.399096 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef6c99048be2d0cfc98ef20b8559ceaf44491ffebbc5a8f363117a1c7a0b3c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:33Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.409417 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:33Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.420882 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:33Z is after 2025-08-24T17:21:41Z"
Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.433327 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:33Z is after 2025-08-24T17:21:41Z"
Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.442939 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:33Z is after 2025-08-24T17:21:41Z"
Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.453262 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:33Z is after 2025-08-24T17:21:41Z"
Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.462648 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:33Z is after 2025-08-24T17:21:41Z"
Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.472149 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xfz4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81a673ac45c8bce5e09aa400e29966d2fa127d94716fbcbb4601193bd912e66d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j74r8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xfz4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:33Z is after 2025-08-24T17:21:41Z"
Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.477262 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.477331 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.477345 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.477362 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.477373 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:33Z","lastTransitionTime":"2026-01-26T10:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.492456 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e
76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:33Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.507936 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:33Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.522488 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:33Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.537448 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f74daac1b5987191a8aec42f28dc59180aaa2033b31d0c4341b0938e473fe2f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:33Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.550957 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:33Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.568015 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33840
af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://889aad833a39b6c921ed34562ee39466eeb30b5f7a0886c1f4dd7c83993b5b93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\
\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:33Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.579341 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 
26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.579383 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.579395 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.579411 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.579425 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:33Z","lastTransitionTime":"2026-01-26T10:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.581535 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\"
:{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\
\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:33Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.681396 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.681442 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.681454 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.681472 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.681484 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:33Z","lastTransitionTime":"2026-01-26T10:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.784089 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.784476 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.784567 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.784678 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.785054 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:33Z","lastTransitionTime":"2026-01-26T10:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.889862 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.890136 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.890269 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.890459 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.890592 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:33Z","lastTransitionTime":"2026-01-26T10:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.964766 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 23:24:37.405180141 +0000 UTC Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.993157 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.993345 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.993417 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.993488 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:33 crc kubenswrapper[5003]: I0126 10:43:33.993546 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:33Z","lastTransitionTime":"2026-01-26T10:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.000700 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.000776 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:43:34 crc kubenswrapper[5003]: E0126 10:43:34.000915 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.000722 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:43:34 crc kubenswrapper[5003]: E0126 10:43:34.001111 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:43:34 crc kubenswrapper[5003]: E0126 10:43:34.001208 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.096157 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.096225 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.096246 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.096273 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.096342 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:34Z","lastTransitionTime":"2026-01-26T10:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.198792 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.199029 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.199117 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.199206 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.199274 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:34Z","lastTransitionTime":"2026-01-26T10:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.199410 5003 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.301823 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.301851 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.301860 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.301873 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.301883 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:34Z","lastTransitionTime":"2026-01-26T10:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.404183 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.404204 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.404213 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.404228 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.404239 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:34Z","lastTransitionTime":"2026-01-26T10:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.505750 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.505962 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.506056 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.506147 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.506229 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:34Z","lastTransitionTime":"2026-01-26T10:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.608595 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.608641 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.608654 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.608671 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.608681 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:34Z","lastTransitionTime":"2026-01-26T10:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.711471 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.711515 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.711527 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.711543 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.711555 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:34Z","lastTransitionTime":"2026-01-26T10:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.814114 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.814141 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.814149 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.814163 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.814173 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:34Z","lastTransitionTime":"2026-01-26T10:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.917322 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.917365 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.917374 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.917388 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.917397 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:34Z","lastTransitionTime":"2026-01-26T10:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:34 crc kubenswrapper[5003]: I0126 10:43:34.964898 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 04:23:03.473020123 +0000 UTC Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.011820 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.019881 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.019934 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.019943 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.019957 5003 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.019968 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:35Z","lastTransitionTime":"2026-01-26T10:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.024544 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadO
nly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.038083 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f74daac1b5987191a8aec42f28dc59180aaa2033b31d0c4341b0938e473fe2f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",
\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"im
ageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.050303 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/m
ultus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.067956 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://889aad833a39b6c921ed34562ee39466eeb30b5f
7a0886c1f4dd7c83993b5b93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.087466 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.101737 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.114455 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef6c99048be2d0cfc98ef20b8559ceaf44491ffebbc5a8f363117a1c7a0b3c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-26T10:43:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.125263 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.125323 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.125336 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.125364 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.125375 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:35Z","lastTransitionTime":"2026-01-26T10:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.129207 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.148772 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.160856 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.171434 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.185895 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.197201 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.201448 5003 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.208065 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xfz4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81a673ac45c8bce5e09aa400e29966d2fa127d94716fbcbb4601193bd912e66d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j74r8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xfz4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.228118 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.228151 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.228161 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.228176 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.228187 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:35Z","lastTransitionTime":"2026-01-26T10:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.330777 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.331060 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.331071 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.331100 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.331115 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:35Z","lastTransitionTime":"2026-01-26T10:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.439023 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.439080 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.439091 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.439115 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.439133 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:35Z","lastTransitionTime":"2026-01-26T10:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.542208 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.542246 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.542254 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.542270 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.542298 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:35Z","lastTransitionTime":"2026-01-26T10:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.644432 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.644533 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.644548 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.644569 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.644584 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:35Z","lastTransitionTime":"2026-01-26T10:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.747488 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.747573 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.747602 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.747636 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.747659 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:35Z","lastTransitionTime":"2026-01-26T10:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.851132 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.851191 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.851202 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.851224 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.851237 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:35Z","lastTransitionTime":"2026-01-26T10:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.955269 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.955345 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.955362 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.955383 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.955398 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:35Z","lastTransitionTime":"2026-01-26T10:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:35 crc kubenswrapper[5003]: I0126 10:43:35.965168 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 19:57:15.06136095 +0000 UTC Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.000988 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.001150 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.001038 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:43:36 crc kubenswrapper[5003]: E0126 10:43:36.001243 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:43:36 crc kubenswrapper[5003]: E0126 10:43:36.001398 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:43:36 crc kubenswrapper[5003]: E0126 10:43:36.001541 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.058221 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.058324 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.058349 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.058377 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.058401 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:36Z","lastTransitionTime":"2026-01-26T10:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.161391 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.161429 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.161440 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.161456 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.161469 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:36Z","lastTransitionTime":"2026-01-26T10:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.205267 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q24zl_f9a98683-f9ac-45d4-9312-43ebf25bdb52/ovnkube-controller/0.log" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.208057 5003 generic.go:334] "Generic (PLEG): container finished" podID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerID="889aad833a39b6c921ed34562ee39466eeb30b5f7a0886c1f4dd7c83993b5b93" exitCode=1 Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.208100 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" event={"ID":"f9a98683-f9ac-45d4-9312-43ebf25bdb52","Type":"ContainerDied","Data":"889aad833a39b6c921ed34562ee39466eeb30b5f7a0886c1f4dd7c83993b5b93"} Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.208789 5003 scope.go:117] "RemoveContainer" containerID="889aad833a39b6c921ed34562ee39466eeb30b5f7a0886c1f4dd7c83993b5b93" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.224005 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:36Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.240186 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:36Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.253764 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:36Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.264320 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.264366 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.264383 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.264406 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.264423 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:36Z","lastTransitionTime":"2026-01-26T10:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.270406 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:36Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.283789 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xfz4f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81a673ac45c8bce5e09aa400e29966d2fa127d94716fbcbb4601193bd912e66d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j74r8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xfz4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:36Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.310137 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021
ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:36Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.323216 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\
\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:36Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.340167 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gm
zb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:36Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.376142 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.376182 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.376196 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.376216 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.376231 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:36Z","lastTransitionTime":"2026-01-26T10:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.399417 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f74daac1b5987191a8aec42f28dc59180aaa2033b31d0c4341b0938e473fe2f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:36Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.421327 5003 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-vpb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:36Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.441934 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics
-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabl
ed\\\"}]},{\\\"containerID\\\":\\\"cri-o://33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://889aad833a39b6c921ed34562ee39466eeb30b5f7a0886c1f4dd7c83993b5b93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://889aad833a39b6c921ed34562ee39466eeb30b5f7a0886c1f4dd7c83993b5b93\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:43:35Z\\\",\\\"message\\\":\\\"0:43:34.936707 6281 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0126 10:43:34.936704 6281 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0126 10:43:34.936731 6281 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0126 10:43:34.936742 6281 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0126 10:43:34.936748 6281 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0126 10:43:34.936754 6281 handler.go:208] Removed *v1.Node event handler 2\\\\nI0126 10:43:34.936760 6281 handler.go:208] Removed *v1.Node event handler 7\\\\nI0126 10:43:34.936757 6281 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 10:43:34.936952 6281 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 10:43:34.937069 6281 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 10:43:34.937110 6281 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 10:43:34.937431 6281 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0126 10:43:34.937510 6281 factory.go:656] Stopping watch factory\\\\nI0126 10:43:34.937540 6281 
ovnkube.go:599] Stopped ovnkube\\\\nI0126 10:43:3\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:36Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.478232 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.478267 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.478301 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.478320 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.478330 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:36Z","lastTransitionTime":"2026-01-26T10:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.484065 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:36Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.495609 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef6c99048be2d0cfc98ef20b8559ceaf44491ffebbc5a8f363117a1c7a0b3c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:36Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.507483 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:36Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.519304 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:36Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.580846 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.580909 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.580932 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.580961 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.580983 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:36Z","lastTransitionTime":"2026-01-26T10:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.683798 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.684160 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.684337 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.684477 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.684609 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:36Z","lastTransitionTime":"2026-01-26T10:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.787682 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.787961 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.788055 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.788193 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.788346 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:36Z","lastTransitionTime":"2026-01-26T10:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.891066 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.891147 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.891169 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.891198 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.891222 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:36Z","lastTransitionTime":"2026-01-26T10:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.966166 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 21:49:24.79411326 +0000 UTC Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.994075 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.994148 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.994172 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.994203 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:36 crc kubenswrapper[5003]: I0126 10:43:36.994227 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:36Z","lastTransitionTime":"2026-01-26T10:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.097955 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.098005 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.098020 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.098042 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.098058 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:37Z","lastTransitionTime":"2026-01-26T10:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.200634 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.201162 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.201253 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.201365 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.201451 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:37Z","lastTransitionTime":"2026-01-26T10:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.304681 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.304725 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.304734 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.304748 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.304762 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:37Z","lastTransitionTime":"2026-01-26T10:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.408136 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.409110 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.409613 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.409657 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.409672 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:37Z","lastTransitionTime":"2026-01-26T10:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.512324 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.512357 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.512366 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.512378 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.512386 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:37Z","lastTransitionTime":"2026-01-26T10:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.614836 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.614895 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.614914 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.614938 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.614954 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:37Z","lastTransitionTime":"2026-01-26T10:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.710049 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s"] Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.710827 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.715117 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.715839 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.717633 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.717703 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.717726 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.717755 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.717781 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:37Z","lastTransitionTime":"2026-01-26T10:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.728968 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:37Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.743919 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef6c99048be2d0cfc98ef20b8559ceaf44491ffebbc5a8f363117a1c7a0b3c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:37Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.755987 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:37Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.768042 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eac8a62-4b57-4423-b3b6-5d62047182f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s4f8s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:37Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.786079 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cl
uster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:37Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.843839 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0eac8a62-4b57-4423-b3b6-5d62047182f3-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-s4f8s\" (UID: \"0eac8a62-4b57-4423-b3b6-5d62047182f3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.843910 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0eac8a62-4b57-4423-b3b6-5d62047182f3-env-overrides\") pod \"ovnkube-control-plane-749d76644c-s4f8s\" (UID: \"0eac8a62-4b57-4423-b3b6-5d62047182f3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.843950 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdjlm\" (UniqueName: \"kubernetes.io/projected/0eac8a62-4b57-4423-b3b6-5d62047182f3-kube-api-access-cdjlm\") pod \"ovnkube-control-plane-749d76644c-s4f8s\" (UID: \"0eac8a62-4b57-4423-b3b6-5d62047182f3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.843946 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:37Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.844207 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0eac8a62-4b57-4423-b3b6-5d62047182f3-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-s4f8s\" (UID: \"0eac8a62-4b57-4423-b3b6-5d62047182f3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.846418 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.846454 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.846467 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.846581 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.846599 5003 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:37Z","lastTransitionTime":"2026-01-26T10:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.862874 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:37Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.878174 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:37Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.895985 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:37Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.908742 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xfz4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81a673ac45c8bce5e09aa400e29966d2fa127d94716fbcbb4601193bd912e66d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j74r8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xfz4f\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:37Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.936553 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:37Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.945424 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0eac8a62-4b57-4423-b3b6-5d62047182f3-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-s4f8s\" (UID: \"0eac8a62-4b57-4423-b3b6-5d62047182f3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.945480 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0eac8a62-4b57-4423-b3b6-5d62047182f3-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-s4f8s\" (UID: \"0eac8a62-4b57-4423-b3b6-5d62047182f3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.945506 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0eac8a62-4b57-4423-b3b6-5d62047182f3-env-overrides\") pod \"ovnkube-control-plane-749d76644c-s4f8s\" (UID: \"0eac8a62-4b57-4423-b3b6-5d62047182f3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.945532 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdjlm\" (UniqueName: \"kubernetes.io/projected/0eac8a62-4b57-4423-b3b6-5d62047182f3-kube-api-access-cdjlm\") pod 
\"ovnkube-control-plane-749d76644c-s4f8s\" (UID: \"0eac8a62-4b57-4423-b3b6-5d62047182f3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.946179 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0eac8a62-4b57-4423-b3b6-5d62047182f3-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-s4f8s\" (UID: \"0eac8a62-4b57-4423-b3b6-5d62047182f3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.946326 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0eac8a62-4b57-4423-b3b6-5d62047182f3-env-overrides\") pod \"ovnkube-control-plane-749d76644c-s4f8s\" (UID: \"0eac8a62-4b57-4423-b3b6-5d62047182f3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.950381 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.950403 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.950411 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.950423 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.950432 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:37Z","lastTransitionTime":"2026-01-26T10:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.953137 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0eac8a62-4b57-4423-b3b6-5d62047182f3-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-s4f8s\" (UID: \"0eac8a62-4b57-4423-b3b6-5d62047182f3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.953262 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:37Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.966508 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdjlm\" (UniqueName: \"kubernetes.io/projected/0eac8a62-4b57-4423-b3b6-5d62047182f3-kube-api-access-cdjlm\") pod \"ovnkube-control-plane-749d76644c-s4f8s\" (UID: \"0eac8a62-4b57-4423-b3b6-5d62047182f3\") " 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.967507 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 00:33:11.674829681 +0000 UTC Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.968335 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\
\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:37Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:37 crc kubenswrapper[5003]: I0126 10:43:37.983571 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f74daac1b5987191a8aec42f28dc59180aaa2033b31d0c4341b0938e473fe2f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/o
s-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":f
alse,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:37Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.000794 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.000849 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.000805 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:43:38 crc kubenswrapper[5003]: E0126 10:43:38.001007 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:43:38 crc kubenswrapper[5003]: E0126 10:43:38.001066 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:43:38 crc kubenswrapper[5003]: E0126 10:43:38.001130 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.000724 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\
\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:37Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.022371 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://889aad833a39b6c921ed34562ee39466eeb30b5f
7a0886c1f4dd7c83993b5b93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://889aad833a39b6c921ed34562ee39466eeb30b5f7a0886c1f4dd7c83993b5b93\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:43:35Z\\\",\\\"message\\\":\\\"0:43:34.936707 6281 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0126 10:43:34.936704 6281 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0126 10:43:34.936731 6281 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0126 10:43:34.936742 6281 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0126 10:43:34.936748 6281 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0126 10:43:34.936754 6281 handler.go:208] Removed *v1.Node event handler 2\\\\nI0126 10:43:34.936760 6281 handler.go:208] Removed *v1.Node event handler 7\\\\nI0126 10:43:34.936757 6281 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 10:43:34.936952 6281 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 10:43:34.937069 6281 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 10:43:34.937110 6281 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 10:43:34.937431 6281 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0126 10:43:34.937510 6281 factory.go:656] Stopping watch factory\\\\nI0126 10:43:34.937540 6281 ovnkube.go:599] Stopped ovnkube\\\\nI0126 
10:43:3\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0
d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:38Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.030470 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" Jan 26 10:43:38 crc kubenswrapper[5003]: W0126 10:43:38.046566 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0eac8a62_4b57_4423_b3b6_5d62047182f3.slice/crio-44b030283b457171c42bc146afc795a6e546792d4f2e35de682d18eda9886671 WatchSource:0}: Error finding container 44b030283b457171c42bc146afc795a6e546792d4f2e35de682d18eda9886671: Status 404 returned error can't find the container with id 44b030283b457171c42bc146afc795a6e546792d4f2e35de682d18eda9886671 Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.051977 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.052024 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.052060 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.052079 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.052092 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:38Z","lastTransitionTime":"2026-01-26T10:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.154335 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.154367 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.154378 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.154394 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.154405 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:38Z","lastTransitionTime":"2026-01-26T10:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.209639 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.209669 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.209677 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.209690 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.209699 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:38Z","lastTransitionTime":"2026-01-26T10:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.214775 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q24zl_f9a98683-f9ac-45d4-9312-43ebf25bdb52/ovnkube-controller/0.log" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.217859 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" event={"ID":"f9a98683-f9ac-45d4-9312-43ebf25bdb52","Type":"ContainerStarted","Data":"6118732ff82b2af89bcac73a5fee09e3c75a895312a5f4de750303a7b241c9eb"} Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.218028 5003 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.219186 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" event={"ID":"0eac8a62-4b57-4423-b3b6-5d62047182f3","Type":"ContainerStarted","Data":"44b030283b457171c42bc146afc795a6e546792d4f2e35de682d18eda9886671"} Jan 26 10:43:38 crc kubenswrapper[5003]: E0126 10:43:38.226261 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:38Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.234469 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:38Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.234920 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.234981 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.235003 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.235032 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.235054 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:38Z","lastTransitionTime":"2026-01-26T10:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.248139 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef6c99048be2d0cfc98ef20b8559ceaf44491ffebbc5a8f363117a1c7a0b3c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:38Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:38 crc kubenswrapper[5003]: E0126 10:43:38.256541 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"5
87875d7-ac1e-443a-baca-4a26e90f0b87\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:38Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.260681 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.260746 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.260761 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.260796 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.260810 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:38Z","lastTransitionTime":"2026-01-26T10:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.271878 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:38Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:38 crc kubenswrapper[5003]: E0126 10:43:38.277965 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:38Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.282658 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.282690 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.282699 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.282714 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.282723 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:38Z","lastTransitionTime":"2026-01-26T10:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.288133 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eac8a62-4b57-4423-b3b6-5d62047182f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s4f8s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2026-01-26T10:43:38Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:38 crc kubenswrapper[5003]: E0126 10:43:38.299678 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:38Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.303431 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.303613 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.303707 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.303820 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.303892 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:38Z","lastTransitionTime":"2026-01-26T10:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.306798 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resourc
es\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Complet
ed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:38Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:38 crc kubenswrapper[5003]: E0126 10:43:38.327376 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:38Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:38 crc kubenswrapper[5003]: E0126 10:43:38.327587 5003 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.329692 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.329732 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.329744 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.329773 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.329782 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:38Z","lastTransitionTime":"2026-01-26T10:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.331513 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:38Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.351596 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:38Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.369820 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:38Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.393932 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:38Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.405131 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xfz4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81a673ac45c8bce5e09aa400e29966d2fa127d94716fbcbb4601193bd912e66d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j74r8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xfz4f\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:38Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.422272 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33840
af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6118732ff82b2af89bcac73a5fee09e3c75a895312a5f4de750303a7b241c9eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://889aad833a39b6c921ed34562ee39466eeb30b5f7a0886c1f4dd7c83993b5b93\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:43:35Z\\\",\\\"message\\\":\\\"0:43:34.936707 6281 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0126 10:43:34.936704 6281 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0126 10:43:34.936731 6281 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0126 10:43:34.936742 6281 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0126 10:43:34.936748 6281 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0126 10:43:34.936754 6281 handler.go:208] Removed *v1.Node event handler 2\\\\nI0126 10:43:34.936760 6281 handler.go:208] Removed *v1.Node event handler 7\\\\nI0126 10:43:34.936757 6281 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 10:43:34.936952 6281 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 10:43:34.937069 6281 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 10:43:34.937110 6281 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 10:43:34.937431 6281 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0126 10:43:34.937510 6281 factory.go:656] Stopping watch factory\\\\nI0126 10:43:34.937540 6281 ovnkube.go:599] Stopped ovnkube\\\\nI0126 
10:43:3\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:32Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\
\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:38Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.433862 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.433904 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.433918 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.433937 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.433950 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:38Z","lastTransitionTime":"2026-01-26T10:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.436993 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:38Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.449948 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:38Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.469106 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f74daac1b5987191a8aec42f28dc59180aaa2033b31d0c4341b0938e473fe2f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:38Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.488003 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:38Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.502675 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:38Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.536699 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 
10:43:38.536735 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.536760 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.536775 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.536786 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:38Z","lastTransitionTime":"2026-01-26T10:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.639038 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.639361 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.639432 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.639799 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.640038 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:38Z","lastTransitionTime":"2026-01-26T10:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.742409 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.742440 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.742448 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.742460 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.742483 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:38Z","lastTransitionTime":"2026-01-26T10:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.845448 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.845483 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.845491 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.845520 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.845529 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:38Z","lastTransitionTime":"2026-01-26T10:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.947849 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.947911 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.947925 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.947942 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.947954 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:38Z","lastTransitionTime":"2026-01-26T10:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:38 crc kubenswrapper[5003]: I0126 10:43:38.968598 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 07:29:01.727392819 +0000 UTC Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.050665 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.050731 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.050748 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.050772 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.050806 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:39Z","lastTransitionTime":"2026-01-26T10:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.153748 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.153798 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.153814 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.153832 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.153844 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:39Z","lastTransitionTime":"2026-01-26T10:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.225814 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q24zl_f9a98683-f9ac-45d4-9312-43ebf25bdb52/ovnkube-controller/1.log" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.226688 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q24zl_f9a98683-f9ac-45d4-9312-43ebf25bdb52/ovnkube-controller/0.log" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.232167 5003 generic.go:334] "Generic (PLEG): container finished" podID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerID="6118732ff82b2af89bcac73a5fee09e3c75a895312a5f4de750303a7b241c9eb" exitCode=1 Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.232264 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" event={"ID":"f9a98683-f9ac-45d4-9312-43ebf25bdb52","Type":"ContainerDied","Data":"6118732ff82b2af89bcac73a5fee09e3c75a895312a5f4de750303a7b241c9eb"} Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.232382 5003 scope.go:117] "RemoveContainer" containerID="889aad833a39b6c921ed34562ee39466eeb30b5f7a0886c1f4dd7c83993b5b93" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.233016 5003 scope.go:117] "RemoveContainer" containerID="6118732ff82b2af89bcac73a5fee09e3c75a895312a5f4de750303a7b241c9eb" Jan 26 10:43:39 crc kubenswrapper[5003]: E0126 10:43:39.233179 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-q24zl_openshift-ovn-kubernetes(f9a98683-f9ac-45d4-9312-43ebf25bdb52)\"" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.239329 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-4jrnq"] Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.239784 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:43:39 crc kubenswrapper[5003]: E0126 10:43:39.239845 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.241531 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" event={"ID":"0eac8a62-4b57-4423-b3b6-5d62047182f3","Type":"ContainerStarted","Data":"98b5af77054fb64a506432cbe6801e93888383a6288532432b7217b7b76c5cea"} Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.241668 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" event={"ID":"0eac8a62-4b57-4423-b3b6-5d62047182f3","Type":"ContainerStarted","Data":"e29a3746f285efd57f60fafc32c5842c9b6509264e0c3385c5e1b5600231f047"} Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.259308 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.259379 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.259401 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.259429 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.259452 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:39Z","lastTransitionTime":"2026-01-26T10:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.269370 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6118732ff82b2af89bcac73a5fee09e3c75a895312a5f4de750303a7b241c9eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://889aad833a39b6c921ed34562ee39466eeb30b5f7a0886c1f4dd7c83993b5b93\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:43:35Z\\\",\\\"message\\\":\\\"0:43:34.936707 6281 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0126 10:43:34.936704 6281 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0126 10:43:34.936731 6281 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0126 10:43:34.936742 6281 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0126 10:43:34.936748 6281 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0126 10:43:34.936754 6281 handler.go:208] Removed *v1.Node event handler 2\\\\nI0126 10:43:34.936760 6281 handler.go:208] Removed *v1.Node event handler 7\\\\nI0126 10:43:34.936757 6281 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 10:43:34.936952 6281 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 10:43:34.937069 6281 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 10:43:34.937110 6281 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 10:43:34.937431 6281 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0126 10:43:34.937510 6281 factory.go:656] Stopping watch factory\\\\nI0126 10:43:34.937540 6281 ovnkube.go:599] Stopped ovnkube\\\\nI0126 
10:43:3\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:32Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6118732ff82b2af89bcac73a5fee09e3c75a895312a5f4de750303a7b241c9eb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"message\\\":\\\"3:38.731813 6422 services_controller.go:360] Finished syncing service ingress-canary on namespace openshift-ingress-canary for network=default : 1.684578ms\\\\nI0126 10:43:38.731818 6422 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nF0126 10:43:38.731443 6422 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:38Z is after 2025-08-24T17:21:41Z]\\\\nI0126 10:43:38.731831 6422 services_controller.go:356] Processing sync for service openshift-ovn-kubernetes/ovn-kubernetes-control-plane for network=default\\\\nI0126 10:43:38.731842 6422 
services_co\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd4
7ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.281918 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],
\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.295662 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"
hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.311403 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f74daac1b5987191a8aec42f28dc59180aaa2033b31d0c4341b0938e473fe2f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T
10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs
\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.326758 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"
host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.347397 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\"
:[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}]
,\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.361660 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5z9l\" (UniqueName: \"kubernetes.io/projected/aa06185d-fe5e-423a-b5a7-19e8bb7c8a60-kube-api-access-h5z9l\") pod \"network-metrics-daemon-4jrnq\" (UID: \"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60\") " pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.361770 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/aa06185d-fe5e-423a-b5a7-19e8bb7c8a60-metrics-certs\") pod \"network-metrics-daemon-4jrnq\" (UID: \"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60\") " pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.361998 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de25971
26bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.362467 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.362509 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.362523 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.362540 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:39 crc 
kubenswrapper[5003]: I0126 10:43:39.362549 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:39Z","lastTransitionTime":"2026-01-26T10:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.376883 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef6c99048be2d0cfc98ef20b8559ceaf44491ffebbc5a8f363117a1c7a0b3c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.393145 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.407496 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eac8a62-4b57-4423-b3b6-5d62047182f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s4f8s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.437903 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021
ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.456599 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.463859 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5z9l\" (UniqueName: \"kubernetes.io/projected/aa06185d-fe5e-423a-b5a7-19e8bb7c8a60-kube-api-access-h5z9l\") pod \"network-metrics-daemon-4jrnq\" (UID: \"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60\") " pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.463930 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/aa06185d-fe5e-423a-b5a7-19e8bb7c8a60-metrics-certs\") pod \"network-metrics-daemon-4jrnq\" (UID: \"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60\") " pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:43:39 crc kubenswrapper[5003]: E0126 10:43:39.464090 5003 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 10:43:39 crc kubenswrapper[5003]: E0126 10:43:39.464146 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/aa06185d-fe5e-423a-b5a7-19e8bb7c8a60-metrics-certs podName:aa06185d-fe5e-423a-b5a7-19e8bb7c8a60 nodeName:}" failed. No retries permitted until 2026-01-26 10:43:39.964126552 +0000 UTC m=+35.505352113 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/aa06185d-fe5e-423a-b5a7-19e8bb7c8a60-metrics-certs") pod "network-metrics-daemon-4jrnq" (UID: "aa06185d-fe5e-423a-b5a7-19e8bb7c8a60") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.465353 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.465420 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.465440 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.465466 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.465483 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:39Z","lastTransitionTime":"2026-01-26T10:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.476205 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.494943 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5z9l\" (UniqueName: \"kubernetes.io/projected/aa06185d-fe5e-423a-b5a7-19e8bb7c8a60-kube-api-access-h5z9l\") pod \"network-metrics-daemon-4jrnq\" (UID: \"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60\") " pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.498548 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.511583 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.521950 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xfz4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81a673ac45c8bce5e09aa400e29966d2fa127d94716fbcbb4601193bd912e66d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j74r8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xfz4f\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.535081 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.546625 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef6c99048be2d0cfc98ef20b8559ceaf44491ffebbc5a8f363117a1c7a0b3c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.562922 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.567413 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.567441 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.567449 5003 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.567461 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.567471 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:39Z","lastTransitionTime":"2026-01-26T10:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.576623 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eac8a62-4b57-4423-b3b6-5d62047182f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e29a3746f285efd57f60fafc32c5842c9b6509264e0c3385c5e1b5600231f047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98b5af77054fb64a506432cbe6801e93888383a6288532432b7217b7b76c5cea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-conf
ig\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s4f8s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.590635 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4jrnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4jrnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.603863 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xfz4f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81a673ac45c8bce5e09aa400e29966d2fa127d94716fbcbb4601193bd912e66d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j74r8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xfz4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.631114 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021
ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.649197 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.666136 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.669818 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.669848 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.669860 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.669875 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.669909 5003 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:39Z","lastTransitionTime":"2026-01-26T10:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.681332 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.694789 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.708378 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.730892 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6118732ff82b2af89bcac73a5fee09e3c75a8953
12a5f4de750303a7b241c9eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://889aad833a39b6c921ed34562ee39466eeb30b5f7a0886c1f4dd7c83993b5b93\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:43:35Z\\\",\\\"message\\\":\\\"0:43:34.936707 6281 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0126 10:43:34.936704 6281 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0126 10:43:34.936731 6281 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0126 10:43:34.936742 6281 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0126 10:43:34.936748 6281 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0126 10:43:34.936754 6281 handler.go:208] Removed *v1.Node event handler 2\\\\nI0126 10:43:34.936760 6281 handler.go:208] Removed *v1.Node event handler 7\\\\nI0126 10:43:34.936757 6281 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 10:43:34.936952 6281 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 10:43:34.937069 6281 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 10:43:34.937110 6281 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 10:43:34.937431 6281 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0126 10:43:34.937510 6281 factory.go:656] Stopping watch factory\\\\nI0126 10:43:34.937540 6281 ovnkube.go:599] Stopped ovnkube\\\\nI0126 10:43:3\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:32Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6118732ff82b2af89bcac73a5fee09e3c75a895312a5f4de750303a7b241c9eb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"message\\\":\\\"3:38.731813 6422 services_controller.go:360] Finished syncing service ingress-canary on namespace openshift-ingress-canary for network=default : 1.684578ms\\\\nI0126 10:43:38.731818 6422 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nF0126 10:43:38.731443 6422 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:38Z is after 2025-08-24T17:21:41Z]\\\\nI0126 10:43:38.731831 6422 
services_controller.go:356] Processing sync for service openshift-ovn-kubernetes/ovn-kubernetes-control-plane for network=default\\\\nI0126 10:43:38.731842 6422 services_co\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\
\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.742874 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.755060 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.770163 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f74daac1b5987191a8aec42f28dc59180aaa2033b31d0c4341b0938e473fe2f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.778414 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.778462 5003 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.778473 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.778502 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.778513 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:39Z","lastTransitionTime":"2026-01-26T10:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.794488 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\
\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\
"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:39Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.880866 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.880914 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.880925 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.880946 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.880959 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:39Z","lastTransitionTime":"2026-01-26T10:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.969202 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 10:25:31.779297702 +0000 UTC Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.969603 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:43:39 crc kubenswrapper[5003]: E0126 10:43:39.969816 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:43:55.969793145 +0000 UTC m=+51.511018706 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.969882 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.970011 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:43:39 crc kubenswrapper[5003]: E0126 10:43:39.970111 5003 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 10:43:39 crc kubenswrapper[5003]: E0126 10:43:39.970209 5003 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.970116 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/aa06185d-fe5e-423a-b5a7-19e8bb7c8a60-metrics-certs\") pod \"network-metrics-daemon-4jrnq\" (UID: \"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60\") " pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:43:39 crc kubenswrapper[5003]: E0126 10:43:39.970242 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 10:43:55.970214737 +0000 UTC m=+51.511440428 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 10:43:39 crc kubenswrapper[5003]: E0126 10:43:39.970266 5003 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 10:43:39 crc kubenswrapper[5003]: E0126 10:43:39.970311 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 10:43:55.970273508 +0000 UTC m=+51.511499299 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 10:43:39 crc kubenswrapper[5003]: E0126 10:43:39.970336 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/aa06185d-fe5e-423a-b5a7-19e8bb7c8a60-metrics-certs podName:aa06185d-fe5e-423a-b5a7-19e8bb7c8a60 nodeName:}" failed. No retries permitted until 2026-01-26 10:43:40.97032296 +0000 UTC m=+36.511548731 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/aa06185d-fe5e-423a-b5a7-19e8bb7c8a60-metrics-certs") pod "network-metrics-daemon-4jrnq" (UID: "aa06185d-fe5e-423a-b5a7-19e8bb7c8a60") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.983711 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.983764 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.983779 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.983797 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:39 crc kubenswrapper[5003]: I0126 10:43:39.983810 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:39Z","lastTransitionTime":"2026-01-26T10:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:40 crc kubenswrapper[5003]: I0126 10:43:40.001147 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:43:40 crc kubenswrapper[5003]: I0126 10:43:40.001203 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:43:40 crc kubenswrapper[5003]: I0126 10:43:40.001165 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:43:40 crc kubenswrapper[5003]: E0126 10:43:40.001440 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:43:40 crc kubenswrapper[5003]: E0126 10:43:40.001538 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:43:40 crc kubenswrapper[5003]: E0126 10:43:40.001734 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:43:40 crc kubenswrapper[5003]: I0126 10:43:40.072044 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:43:40 crc kubenswrapper[5003]: I0126 10:43:40.072158 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:43:40 crc kubenswrapper[5003]: E0126 10:43:40.072256 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 10:43:40 crc kubenswrapper[5003]: E0126 10:43:40.072308 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 10:43:40 crc kubenswrapper[5003]: E0126 10:43:40.072320 5003 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 10:43:40 crc kubenswrapper[5003]: E0126 10:43:40.072379 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-26 10:43:56.072359574 +0000 UTC m=+51.613585135 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 10:43:40 crc kubenswrapper[5003]: E0126 10:43:40.072440 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 10:43:40 crc kubenswrapper[5003]: E0126 10:43:40.072470 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 10:43:40 crc kubenswrapper[5003]: E0126 10:43:40.072492 5003 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 10:43:40 crc kubenswrapper[5003]: E0126 10:43:40.072588 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-26 10:43:56.07256187 +0000 UTC m=+51.613787471 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 10:43:40 crc kubenswrapper[5003]: I0126 10:43:40.086086 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:40 crc kubenswrapper[5003]: I0126 10:43:40.086129 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:40 crc kubenswrapper[5003]: I0126 10:43:40.086140 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:40 crc kubenswrapper[5003]: I0126 10:43:40.086157 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:40 crc kubenswrapper[5003]: I0126 10:43:40.086166 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:40Z","lastTransitionTime":"2026-01-26T10:43:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 26 10:43:40 crc kubenswrapper[5003]: I0126 10:43:40.248050 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q24zl_f9a98683-f9ac-45d4-9312-43ebf25bdb52/ovnkube-controller/1.log"
Jan 26 10:43:40 crc kubenswrapper[5003]: I0126 10:43:40.970316 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 23:14:54.004163603 +0000 UTC
Jan 26 10:43:40 crc kubenswrapper[5003]: I0126 10:43:40.983921 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/aa06185d-fe5e-423a-b5a7-19e8bb7c8a60-metrics-certs\") pod \"network-metrics-daemon-4jrnq\" (UID: \"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60\") " pod="openshift-multus/network-metrics-daemon-4jrnq"
Jan 26 10:43:40 crc kubenswrapper[5003]: E0126 10:43:40.984183 5003 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 26 10:43:40 crc kubenswrapper[5003]: E0126 10:43:40.984326 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/aa06185d-fe5e-423a-b5a7-19e8bb7c8a60-metrics-certs podName:aa06185d-fe5e-423a-b5a7-19e8bb7c8a60 nodeName:}" failed. No retries permitted until 2026-01-26 10:43:42.98425788 +0000 UTC m=+38.525483481 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/aa06185d-fe5e-423a-b5a7-19e8bb7c8a60-metrics-certs") pod "network-metrics-daemon-4jrnq" (UID: "aa06185d-fe5e-423a-b5a7-19e8bb7c8a60") : object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 26 10:43:41 crc kubenswrapper[5003]: I0126 10:43:41.001553 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq"
Jan 26 10:43:41 crc kubenswrapper[5003]: E0126 10:43:41.001765 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60"
Jan 26 10:43:41 crc kubenswrapper[5003]: I0126 10:43:41.014023 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:43:41 crc kubenswrapper[5003]: I0126 10:43:41.014113 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:43:41 crc kubenswrapper[5003]: I0126 10:43:41.014127 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:43:41 crc kubenswrapper[5003]: I0126 10:43:41.014165 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:43:41 crc kubenswrapper[5003]: I0126 10:43:41.014180 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:41Z","lastTransitionTime":"2026-01-26T10:43:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
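The recurring object "namespace"/"name" not registered errors above typically mean the kubelet is resolving secrets and configmaps through its per-pod object cache, which only serves references that have been registered for admitted pods; until that registration catches up after startup, lookups fail even if the objects exist in the API server. A toy sketch of that register-before-get contract, with illustrative names rather than kubelet's actual types:

package main

import "fmt"

// objectCache mimics a register-before-get store: Get fails for any
// namespace/name no pod has registered yet, which is the "not registered"
// failure mode in the entries above.
type objectCache struct {
	registered map[string]string // "ns/name" -> payload
}

func (c *objectCache) Register(ns, name, payload string) {
	if c.registered == nil {
		c.registered = map[string]string{}
	}
	c.registered[ns+"/"+name] = payload
}

func (c *objectCache) Get(ns, name string) (string, error) {
	v, ok := c.registered[ns+"/"+name]
	if !ok {
		return "", fmt.Errorf("object %q/%q not registered", ns, name)
	}
	return v, nil
}

func main() {
	var c objectCache
	if _, err := c.Get("openshift-multus", "metrics-daemon-secret"); err != nil {
		fmt.Println(err) // matches the error shape logged above
	}
	c.Register("openshift-multus", "metrics-daemon-secret", "tls material")
	if v, err := c.Get("openshift-multus", "metrics-daemon-secret"); err == nil {
		fmt.Println("resolved:", v)
	}
}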
Jan 26 10:43:41 crc kubenswrapper[5003]: I0126 10:43:41.971045 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 14:51:33.718541521 +0000 UTC
Jan 26 10:43:42 crc kubenswrapper[5003]: I0126 10:43:42.000617 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 26 10:43:42 crc kubenswrapper[5003]: I0126 10:43:42.000676 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 26 10:43:42 crc kubenswrapper[5003]: E0126 10:43:42.000723 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 26 10:43:42 crc kubenswrapper[5003]: E0126 10:43:42.000754 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 26 10:43:42 crc kubenswrapper[5003]: I0126 10:43:42.000682 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 26 10:43:42 crc kubenswrapper[5003]: E0126 10:43:42.000817 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 26 10:43:42 crc kubenswrapper[5003]: I0126 10:43:42.041828 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:43:42 crc kubenswrapper[5003]: I0126 10:43:42.041868 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:43:42 crc kubenswrapper[5003]: I0126 10:43:42.041875 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:43:42 crc kubenswrapper[5003]: I0126 10:43:42.041893 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:43:42 crc kubenswrapper[5003]: I0126 10:43:42.041905 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:42Z","lastTransitionTime":"2026-01-26T10:43:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
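Every failed sync attempt for the three stuck pods logs one "Error syncing pod, skipping" line carrying a podUID, so tallying those lines is a quick way to see how often each pod is being retried. A small sketch that counts occurrences from a saved journal excerpt (the file name and regex here are assumptions for illustration, not part of the log):

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

func main() {
	// Assumed input: this journal excerpt saved as kubelet.log.
	f, err := os.Open("kubelet.log")
	if err != nil {
		panic(err)
	}
	defer f.Close()

	re := regexp.MustCompile(`"Error syncing pod, skipping".*podUID="([0-9a-f-]+)"`)
	counts := map[string]int{}

	sc := bufio.NewScanner(f)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // entries here are long
	for sc.Scan() {
		if m := re.FindStringSubmatch(sc.Text()); m != nil {
			counts[m[1]]++
		}
	}
	for uid, n := range counts {
		fmt.Printf("%s: %d sync failures\n", uid, n)
	}
}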
Jan 26 10:43:42 crc kubenswrapper[5003]: I0126 10:43:42.972093 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 17:27:50.610280562 +0000 UTC
Jan 26 10:43:43 crc kubenswrapper[5003]: I0126 10:43:43.001276 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq"
Jan 26 10:43:43 crc kubenswrapper[5003]: E0126 10:43:43.001596 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60"
Jan 26 10:43:43 crc kubenswrapper[5003]: I0126 10:43:43.007152 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/aa06185d-fe5e-423a-b5a7-19e8bb7c8a60-metrics-certs\") pod \"network-metrics-daemon-4jrnq\" (UID: \"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60\") " pod="openshift-multus/network-metrics-daemon-4jrnq"
Jan 26 10:43:43 crc kubenswrapper[5003]: E0126 10:43:43.007362 5003 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 26 10:43:43 crc kubenswrapper[5003]: E0126 10:43:43.007423 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/aa06185d-fe5e-423a-b5a7-19e8bb7c8a60-metrics-certs podName:aa06185d-fe5e-423a-b5a7-19e8bb7c8a60 nodeName:}" failed. No retries permitted until 2026-01-26 10:43:47.007403425 +0000 UTC m=+42.548629006 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/aa06185d-fe5e-423a-b5a7-19e8bb7c8a60-metrics-certs") pod "network-metrics-daemon-4jrnq" (UID: "aa06185d-fe5e-423a-b5a7-19e8bb7c8a60") : object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 26 10:43:43 crc kubenswrapper[5003]: I0126 10:43:43.073452 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:43:43 crc kubenswrapper[5003]: I0126 10:43:43.073676 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:43:43 crc kubenswrapper[5003]: I0126 10:43:43.073753 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:43:43 crc kubenswrapper[5003]: I0126 10:43:43.073863 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:43:43 crc kubenswrapper[5003]: I0126 10:43:43.073949 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:43Z","lastTransitionTime":"2026-01-26T10:43:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
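The certificate_manager entries report the same expiration every time but a different rotation deadline on each pass (2025-12-31, 2025-11-18, 2025-12-14, and 2025-12-22 in this section), which is consistent with the deadline being re-drawn at random within the later part of the certificate's validity window each time it is evaluated. A sketch of that idea, assuming a jitter range of roughly 70-90% of the lifetime (the exact fraction is a kubelet internal, not shown in this log):

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline picks a random point in an assumed 70-90% span of the
// certificate's validity, which would explain why consecutive log lines
// above show different deadlines for the same expiration time.
func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	jittered := time.Duration((0.7 + 0.2*rand.Float64()) * float64(total))
	return notBefore.Add(jittered)
}

func main() {
	notAfter := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC) // expiration from the log
	notBefore := notAfter.Add(-90 * 24 * time.Hour)           // assumed 90-day lifetime
	for i := 0; i < 3; i++ {
		fmt.Println("rotation deadline:", rotationDeadline(notBefore, notAfter))
	}
}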
Jan 26 10:43:43 crc kubenswrapper[5003]: I0126 10:43:43.972438 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 01:17:43.486023305 +0000 UTC
Jan 26 10:43:44 crc kubenswrapper[5003]: I0126 10:43:44.000774 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 26 10:43:44 crc kubenswrapper[5003]: E0126 10:43:44.001125 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 26 10:43:44 crc kubenswrapper[5003]: I0126 10:43:44.000842 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
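Every failure in this section chains back to the single root cause named in the NodeNotReady condition: no CNI configuration file in /etc/kubernetes/cni/net.d/, so pod sandboxes cannot be created and the network-dependent pods never sync. A minimal sketch of the readiness check that message implies, just listing the directory the kubelet names (run on the node itself; the file-suffix filter reflects common CNI config naming conventions, not anything stated in this log):

package main

import (
	"fmt"
	"os"
	"strings"
)

func main() {
	const dir = "/etc/kubernetes/cni/net.d" // directory named in the NotReady condition
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Println("cannot read CNI conf dir:", err)
		return
	}
	found := false
	for _, e := range entries {
		// CNI config files conventionally end in .conf, .conflist, or .json.
		if strings.HasSuffix(e.Name(), ".conf") ||
			strings.HasSuffix(e.Name(), ".conflist") ||
			strings.HasSuffix(e.Name(), ".json") {
			fmt.Println("found CNI config:", e.Name())
			found = true
		}
	}
	if !found {
		fmt.Println("no CNI configuration file in", dir, "- network plugin not ready")
	}
}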
Jan 26 10:43:44 crc kubenswrapper[5003]: E0126 10:43:44.001380 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 26 10:43:44 crc kubenswrapper[5003]: I0126 10:43:44.000836 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 26 10:43:44 crc kubenswrapper[5003]: E0126 10:43:44.001595 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 26 10:43:44 crc kubenswrapper[5003]: I0126 10:43:44.002490 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:43:44 crc kubenswrapper[5003]: I0126 10:43:44.002521 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:43:44 crc kubenswrapper[5003]: I0126 10:43:44.002529 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:43:44 crc kubenswrapper[5003]: I0126 10:43:44.002545 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:43:44 crc kubenswrapper[5003]: I0126 10:43:44.002553 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:44Z","lastTransitionTime":"2026-01-26T10:43:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:43:44 crc kubenswrapper[5003]: I0126 10:43:44.622151 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:43:44 crc kubenswrapper[5003]: I0126 10:43:44.622252 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:43:44 crc kubenswrapper[5003]: I0126 10:43:44.622262 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:43:44 crc kubenswrapper[5003]: I0126 10:43:44.622276 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:43:44 crc kubenswrapper[5003]: I0126 10:43:44.622311 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:44Z","lastTransitionTime":"2026-01-26T10:43:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:44 crc kubenswrapper[5003]: I0126 10:43:44.724780 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:44 crc kubenswrapper[5003]: I0126 10:43:44.725015 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:44 crc kubenswrapper[5003]: I0126 10:43:44.725129 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:44 crc kubenswrapper[5003]: I0126 10:43:44.725217 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:44 crc kubenswrapper[5003]: I0126 10:43:44.725330 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:44Z","lastTransitionTime":"2026-01-26T10:43:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:44 crc kubenswrapper[5003]: I0126 10:43:44.827416 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:44 crc kubenswrapper[5003]: I0126 10:43:44.827730 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:44 crc kubenswrapper[5003]: I0126 10:43:44.827829 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:44 crc kubenswrapper[5003]: I0126 10:43:44.827916 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:44 crc kubenswrapper[5003]: I0126 10:43:44.828001 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:44Z","lastTransitionTime":"2026-01-26T10:43:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:44 crc kubenswrapper[5003]: I0126 10:43:44.930393 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:44 crc kubenswrapper[5003]: I0126 10:43:44.930469 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:44 crc kubenswrapper[5003]: I0126 10:43:44.930487 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:44 crc kubenswrapper[5003]: I0126 10:43:44.930511 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:44 crc kubenswrapper[5003]: I0126 10:43:44.930529 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:44Z","lastTransitionTime":"2026-01-26T10:43:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:44 crc kubenswrapper[5003]: I0126 10:43:44.973130 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 00:46:06.374660453 +0000 UTC Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.001566 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:43:45 crc kubenswrapper[5003]: E0126 10:43:45.001840 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.014604 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eac8a62-4b57-4423-b3b6-5d62047182f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e29a3746f285efd57f60fafc32c5842c9b6509264e0c3385c5e1b5600231f047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98b5af77054fb64a506432cbe6801e93888383a6288532432b7217b7b76c5cea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s4f8s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:45Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.033615 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.033655 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.033664 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.033681 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.033693 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:45Z","lastTransitionTime":"2026-01-26T10:43:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.034802 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4jrnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4jrnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:45Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.052750 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:45Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.068706 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef6c99048be2d0cfc98ef20b8559ceaf44491ffebbc5a8f363117a1c7a0b3c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-26T10:43:45Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.081820 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:45Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.096893 5003 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:45Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.113177 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:45Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.126098 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xfz4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81a673ac45c8bce5e09aa400e29966d2fa127d94716fbcbb4601193bd912e66d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-j74r8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xfz4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:45Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.136594 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.136824 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.136948 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.137072 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.137214 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:45Z","lastTransitionTime":"2026-01-26T10:43:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.153617 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:45Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.171204 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:45Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.186972 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:45Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.198999 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\
":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:45Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.212483 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f74daac1b5987191a8aec42f28dc59180aaa2033b31d0c4341b0938e473fe2f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708
c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/servi
ceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPat
h\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:45Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.224125 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\
"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:45Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.239919 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.240147 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.240251 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.240353 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.240428 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:45Z","lastTransitionTime":"2026-01-26T10:43:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.240546 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6118732ff82b2af89bcac73a5fee09e3c75a895312a5f4de750303a7b241c9eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://889aad833a39b6c921ed34562ee39466eeb30b5f7a0886c1f4dd7c83993b5b93\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:43:35Z\\\",\\\"message\\\":\\\"0:43:34.936707 6281 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0126 10:43:34.936704 6281 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0126 10:43:34.936731 6281 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0126 10:43:34.936742 6281 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0126 10:43:34.936748 6281 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0126 10:43:34.936754 6281 handler.go:208] Removed *v1.Node event handler 2\\\\nI0126 10:43:34.936760 6281 handler.go:208] Removed *v1.Node event handler 7\\\\nI0126 10:43:34.936757 6281 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 10:43:34.936952 6281 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 10:43:34.937069 6281 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 10:43:34.937110 6281 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 10:43:34.937431 6281 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0126 10:43:34.937510 6281 factory.go:656] Stopping watch factory\\\\nI0126 10:43:34.937540 6281 ovnkube.go:599] Stopped ovnkube\\\\nI0126 
10:43:3\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:32Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6118732ff82b2af89bcac73a5fee09e3c75a895312a5f4de750303a7b241c9eb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"message\\\":\\\"3:38.731813 6422 services_controller.go:360] Finished syncing service ingress-canary on namespace openshift-ingress-canary for network=default : 1.684578ms\\\\nI0126 10:43:38.731818 6422 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nF0126 10:43:38.731443 6422 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:38Z is after 2025-08-24T17:21:41Z]\\\\nI0126 10:43:38.731831 6422 services_controller.go:356] Processing sync for service openshift-ovn-kubernetes/ovn-kubernetes-control-plane for network=default\\\\nI0126 10:43:38.731842 6422 
services_co\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd4
7ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:45Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.274938 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],
\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:45Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.300835 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"
quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-26T10:43:45Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.342564 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.342842 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.342907 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.343066 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.343153 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:45Z","lastTransitionTime":"2026-01-26T10:43:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.444939 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.444992 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.445003 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.445021 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.445032 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:45Z","lastTransitionTime":"2026-01-26T10:43:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.547382 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.547414 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.547423 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.547436 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.547447 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:45Z","lastTransitionTime":"2026-01-26T10:43:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.650236 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.650293 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.650302 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.650317 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.650326 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:45Z","lastTransitionTime":"2026-01-26T10:43:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.754694 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.754913 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.755024 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.755100 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.755176 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:45Z","lastTransitionTime":"2026-01-26T10:43:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.857270 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.857518 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.857620 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.857709 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.857774 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:45Z","lastTransitionTime":"2026-01-26T10:43:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.959587 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.959812 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.959823 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.959837 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.959848 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:45Z","lastTransitionTime":"2026-01-26T10:43:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:45 crc kubenswrapper[5003]: I0126 10:43:45.974019 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 21:23:32.136386828 +0000 UTC Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.001400 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:43:46 crc kubenswrapper[5003]: E0126 10:43:46.001569 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.001902 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:43:46 crc kubenswrapper[5003]: E0126 10:43:46.002023 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.002121 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:43:46 crc kubenswrapper[5003]: E0126 10:43:46.002222 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.063027 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.063363 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.063536 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.063660 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.063759 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:46Z","lastTransitionTime":"2026-01-26T10:43:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.166359 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.166415 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.166430 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.166451 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.166469 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:46Z","lastTransitionTime":"2026-01-26T10:43:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.269564 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.269620 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.269634 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.269653 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.269667 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:46Z","lastTransitionTime":"2026-01-26T10:43:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.372203 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.372256 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.372272 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.372339 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.372357 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:46Z","lastTransitionTime":"2026-01-26T10:43:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.474681 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.475328 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.475427 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.475520 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.475613 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:46Z","lastTransitionTime":"2026-01-26T10:43:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.578774 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.578835 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.578844 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.578857 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.578866 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:46Z","lastTransitionTime":"2026-01-26T10:43:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.681908 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.682137 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.682261 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.682404 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.682478 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:46Z","lastTransitionTime":"2026-01-26T10:43:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.784893 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.784966 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.784989 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.785019 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.785060 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:46Z","lastTransitionTime":"2026-01-26T10:43:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.887879 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.887917 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.887927 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.887942 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.887952 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:46Z","lastTransitionTime":"2026-01-26T10:43:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.975134 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 10:38:35.278663233 +0000 UTC Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.990329 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.990395 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.990407 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.990423 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:46 crc kubenswrapper[5003]: I0126 10:43:46.990435 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:46Z","lastTransitionTime":"2026-01-26T10:43:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.001852 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:43:47 crc kubenswrapper[5003]: E0126 10:43:47.002023 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.050323 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/aa06185d-fe5e-423a-b5a7-19e8bb7c8a60-metrics-certs\") pod \"network-metrics-daemon-4jrnq\" (UID: \"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60\") " pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:43:47 crc kubenswrapper[5003]: E0126 10:43:47.050512 5003 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 10:43:47 crc kubenswrapper[5003]: E0126 10:43:47.050592 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/aa06185d-fe5e-423a-b5a7-19e8bb7c8a60-metrics-certs podName:aa06185d-fe5e-423a-b5a7-19e8bb7c8a60 nodeName:}" failed. No retries permitted until 2026-01-26 10:43:55.050572207 +0000 UTC m=+50.591797768 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/aa06185d-fe5e-423a-b5a7-19e8bb7c8a60-metrics-certs") pod "network-metrics-daemon-4jrnq" (UID: "aa06185d-fe5e-423a-b5a7-19e8bb7c8a60") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.092773 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.092821 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.092836 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.092855 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.092869 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:47Z","lastTransitionTime":"2026-01-26T10:43:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.195772 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.195838 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.195851 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.195869 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.195881 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:47Z","lastTransitionTime":"2026-01-26T10:43:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.299375 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.299443 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.299464 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.299490 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.299509 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:47Z","lastTransitionTime":"2026-01-26T10:43:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.402764 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.402813 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.402832 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.402857 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.402874 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:47Z","lastTransitionTime":"2026-01-26T10:43:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.506428 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.506477 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.506486 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.506502 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.506513 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:47Z","lastTransitionTime":"2026-01-26T10:43:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.609601 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.609646 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.609661 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.609681 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.609694 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:47Z","lastTransitionTime":"2026-01-26T10:43:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.712526 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.712789 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.712857 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.712918 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.712984 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:47Z","lastTransitionTime":"2026-01-26T10:43:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.816797 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.817261 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.817500 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.817685 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.817854 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:47Z","lastTransitionTime":"2026-01-26T10:43:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.920551 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.920627 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.920650 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.920680 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.920703 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:47Z","lastTransitionTime":"2026-01-26T10:43:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:47 crc kubenswrapper[5003]: I0126 10:43:47.976022 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 18:45:06.116338143 +0000 UTC Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.001601 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.001608 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:43:48 crc kubenswrapper[5003]: E0126 10:43:48.001993 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:43:48 crc kubenswrapper[5003]: E0126 10:43:48.002134 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.001608 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:43:48 crc kubenswrapper[5003]: E0126 10:43:48.002487 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.023183 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.023249 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.023270 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.023322 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.023337 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:48Z","lastTransitionTime":"2026-01-26T10:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.126469 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.126526 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.126538 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.126565 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.126578 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:48Z","lastTransitionTime":"2026-01-26T10:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.229406 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.229446 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.229454 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.229468 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.229476 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:48Z","lastTransitionTime":"2026-01-26T10:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.332735 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.332800 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.332822 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.332851 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.332873 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:48Z","lastTransitionTime":"2026-01-26T10:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.434880 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.434924 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.434934 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.434950 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.434965 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:48Z","lastTransitionTime":"2026-01-26T10:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.438838 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.438889 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.438903 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.438927 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.438941 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:48Z","lastTransitionTime":"2026-01-26T10:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:48 crc kubenswrapper[5003]: E0126 10:43:48.454315 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:48Z is after 
2025-08-24T17:21:41Z" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.459944 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.460032 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.460099 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.460169 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.460247 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:48Z","lastTransitionTime":"2026-01-26T10:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:48 crc kubenswrapper[5003]: E0126 10:43:48.473069 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:48Z is after 
2025-08-24T17:21:41Z" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.477771 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.477822 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.477835 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.477857 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.477872 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:48Z","lastTransitionTime":"2026-01-26T10:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:48 crc kubenswrapper[5003]: E0126 10:43:48.495942 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:48Z is after 
2025-08-24T17:21:41Z" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.500913 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.500970 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.500986 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.501008 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.501024 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:48Z","lastTransitionTime":"2026-01-26T10:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:48 crc kubenswrapper[5003]: E0126 10:43:48.520153 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:48Z is after 
2025-08-24T17:21:41Z" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.524661 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.524798 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.524889 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.524995 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.525083 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:48Z","lastTransitionTime":"2026-01-26T10:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:48 crc kubenswrapper[5003]: E0126 10:43:48.539308 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:48Z is after 
2025-08-24T17:21:41Z" Jan 26 10:43:48 crc kubenswrapper[5003]: E0126 10:43:48.539488 5003 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.541757 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.541820 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.541837 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.541862 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.541879 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:48Z","lastTransitionTime":"2026-01-26T10:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.644153 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.644436 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.644498 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.644583 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.644678 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:48Z","lastTransitionTime":"2026-01-26T10:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.748146 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.748189 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.748205 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.748226 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.748243 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:48Z","lastTransitionTime":"2026-01-26T10:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.851526 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.851571 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.851581 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.851596 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.851606 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:48Z","lastTransitionTime":"2026-01-26T10:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.954206 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.954259 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.954272 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.954312 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.954326 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:48Z","lastTransitionTime":"2026-01-26T10:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:48 crc kubenswrapper[5003]: I0126 10:43:48.976785 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-03 14:02:26.98993173 +0000 UTC Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.001499 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:43:49 crc kubenswrapper[5003]: E0126 10:43:49.001651 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.056217 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.056321 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.056347 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.056374 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.056396 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:49Z","lastTransitionTime":"2026-01-26T10:43:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.191691 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.191733 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.191745 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.191761 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.191773 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:49Z","lastTransitionTime":"2026-01-26T10:43:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.294163 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.294201 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.294212 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.294230 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.294241 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:49Z","lastTransitionTime":"2026-01-26T10:43:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.396580 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.396625 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.396634 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.396648 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.396657 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:49Z","lastTransitionTime":"2026-01-26T10:43:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.499644 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.499718 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.499739 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.499767 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.499788 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:49Z","lastTransitionTime":"2026-01-26T10:43:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.602871 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.602922 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.602939 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.602961 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.602977 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:49Z","lastTransitionTime":"2026-01-26T10:43:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.705613 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.705655 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.705667 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.705683 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.705693 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:49Z","lastTransitionTime":"2026-01-26T10:43:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.809023 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.809072 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.809085 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.809103 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.809116 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:49Z","lastTransitionTime":"2026-01-26T10:43:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.912769 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.912830 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.912850 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.912875 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.912893 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:49Z","lastTransitionTime":"2026-01-26T10:43:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:49 crc kubenswrapper[5003]: I0126 10:43:49.978013 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 22:27:05.211582963 +0000 UTC Jan 26 10:43:50 crc kubenswrapper[5003]: I0126 10:43:50.001388 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:43:50 crc kubenswrapper[5003]: I0126 10:43:50.001427 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:43:50 crc kubenswrapper[5003]: I0126 10:43:50.001524 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:43:50 crc kubenswrapper[5003]: E0126 10:43:50.001698 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:43:50 crc kubenswrapper[5003]: E0126 10:43:50.001797 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:43:50 crc kubenswrapper[5003]: E0126 10:43:50.001899 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:43:50 crc kubenswrapper[5003]: I0126 10:43:50.015297 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:50 crc kubenswrapper[5003]: I0126 10:43:50.015337 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:50 crc kubenswrapper[5003]: I0126 10:43:50.015346 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:50 crc kubenswrapper[5003]: I0126 10:43:50.015360 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:50 crc kubenswrapper[5003]: I0126 10:43:50.015371 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:50Z","lastTransitionTime":"2026-01-26T10:43:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:50 crc kubenswrapper[5003]: I0126 10:43:50.118638 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:50 crc kubenswrapper[5003]: I0126 10:43:50.118694 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:50 crc kubenswrapper[5003]: I0126 10:43:50.118708 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:50 crc kubenswrapper[5003]: I0126 10:43:50.118728 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:50 crc kubenswrapper[5003]: I0126 10:43:50.118747 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:50Z","lastTransitionTime":"2026-01-26T10:43:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 26 10:43:50 crc kubenswrapper[5003]: I0126 10:43:50.978163 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 03:51:08.881372199 +0000 UTC
Jan 26 10:43:51 crc kubenswrapper[5003]: I0126 10:43:51.001100 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq"
Jan 26 10:43:51 crc kubenswrapper[5003]: E0126 10:43:51.001323 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60"
Jan 26 10:43:51 crc kubenswrapper[5003]: I0126 10:43:51.048962 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:43:51 crc kubenswrapper[5003]: I0126 10:43:51.049002 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:43:51 crc kubenswrapper[5003]: I0126 10:43:51.049014 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:43:51 crc kubenswrapper[5003]: I0126 10:43:51.049029 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:43:51 crc kubenswrapper[5003]: I0126 10:43:51.049040 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:51Z","lastTransitionTime":"2026-01-26T10:43:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:43:51 crc kubenswrapper[5003]: I0126 10:43:51.978414 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 16:27:47.060999814 +0000 UTC
Jan 26 10:43:52 crc kubenswrapper[5003]: I0126 10:43:52.001549 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 26 10:43:52 crc kubenswrapper[5003]: I0126 10:43:52.001565 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 26 10:43:52 crc kubenswrapper[5003]: I0126 10:43:52.001669 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 26 10:43:52 crc kubenswrapper[5003]: E0126 10:43:52.001693 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 26 10:43:52 crc kubenswrapper[5003]: E0126 10:43:52.001924 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 26 10:43:52 crc kubenswrapper[5003]: E0126 10:43:52.002033 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 26 10:43:52 crc kubenswrapper[5003]: I0126 10:43:52.081081 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:43:52 crc kubenswrapper[5003]: I0126 10:43:52.081128 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:43:52 crc kubenswrapper[5003]: I0126 10:43:52.081138 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:43:52 crc kubenswrapper[5003]: I0126 10:43:52.081155 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:43:52 crc kubenswrapper[5003]: I0126 10:43:52.081167 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:52Z","lastTransitionTime":"2026-01-26T10:43:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:43:52 crc kubenswrapper[5003]: I0126 10:43:52.978822 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 23:10:24.176120406 +0000 UTC
Jan 26 10:43:52 crc kubenswrapper[5003]: I0126 10:43:52.986100 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl"
Jan 26 10:43:52 crc kubenswrapper[5003]: I0126 10:43:52.986967 5003 scope.go:117] "RemoveContainer" containerID="6118732ff82b2af89bcac73a5fee09e3c75a895312a5f4de750303a7b241c9eb"
Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.001463 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq"
Jan 26 10:43:53 crc kubenswrapper[5003]: E0126 10:43:53.001700 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60"
Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.003304 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:53Z is after 2025-08-24T17:21:41Z"
Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.017558 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.017608 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.017625 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.017649 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.017666 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:53Z","lastTransitionTime":"2026-01-26T10:43:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.029462 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:53Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.042384 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:53Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.052146 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xfz4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81a673ac45c8bce5e09aa400e29966d2fa127d94716fbcbb4601193bd912e66d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-j74r8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xfz4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:53Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.073475 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cer
t-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\
"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:53Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.084887 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:53Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.093870 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:53Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.104883 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:53Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.117725 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f74daac1b5987191a8aec42f28dc59180aaa2033b31d0c4341b0938e473fe2f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:53Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.120081 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.120211 5003 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.120305 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.120391 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.120464 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:53Z","lastTransitionTime":"2026-01-26T10:43:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.128239 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/k
ubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:53Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.144685 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6118732ff82b2af89bcac73a5fee09e3c75a8953
12a5f4de750303a7b241c9eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6118732ff82b2af89bcac73a5fee09e3c75a895312a5f4de750303a7b241c9eb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"message\\\":\\\"3:38.731813 6422 services_controller.go:360] Finished syncing service ingress-canary on namespace openshift-ingress-canary for network=default : 1.684578ms\\\\nI0126 10:43:38.731818 6422 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nF0126 10:43:38.731443 6422 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:38Z is after 2025-08-24T17:21:41Z]\\\\nI0126 10:43:38.731831 6422 services_controller.go:356] Processing sync for service openshift-ovn-kubernetes/ovn-kubernetes-control-plane for network=default\\\\nI0126 10:43:38.731842 6422 services_co\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-q24zl_openshift-ovn-kubernetes(f9a98683-f9ac-45d4-9312-43ebf25bdb52)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:53Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.160574 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,
\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:53Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.172953 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\
\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:53Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.184514 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eac8a62-4b57-4423-b3b6-5d62047182f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e29a3746f285efd57f60fafc32c5842c9b6509264e0c3385c5e1b5600231f047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98b5af77054fb64a506432cbe6801e93888383a6288532432b7217b7b76c5cea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\"
:\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s4f8s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:53Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.197445 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4jrnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4jrnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:53Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.211523 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:53Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.222970 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.223002 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.223010 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.223024 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.223033 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:53Z","lastTransitionTime":"2026-01-26T10:43:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.224615 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef6c99048be2d0cfc98ef20b8559ceaf44491ffebbc5a8f363117a1c7a0b3c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:53Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.324608 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.324638 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.324646 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.324660 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.324670 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:53Z","lastTransitionTime":"2026-01-26T10:43:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
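
Note: every failed status patch in this stretch shares a single root cause: the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 is serving a certificate that expired on 2025-08-24T17:21:41Z, while the node clock reads 2026-01-26. A minimal sketch for confirming the expiry from the node itself (assumes Python plus the third-party cryptography package; host and port are taken from the records above):

```python
import ssl
from cryptography import x509  # third-party package, assumed available

HOST, PORT = "127.0.0.1", 9743  # webhook endpoint from the log records

# get_server_certificate() skips chain verification when no CA bundle is
# given, so it returns the PEM even though the certificate is expired.
pem = ssl.get_server_certificate((HOST, PORT))
cert = x509.load_pem_x509_certificate(pem.encode())

print("subject:  ", cert.subject.rfc4514_string())
print("notBefore:", cert.not_valid_before)
print("notAfter: ", cert.not_valid_after)  # the log reports 2025-08-24T17:21:41Z
```

Until that certificate is rotated, every status patch the kubelet retries will keep failing with the same x509 error.
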
Has your network provider started?"} Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.427712 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.427802 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.427833 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.427862 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.427880 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:53Z","lastTransitionTime":"2026-01-26T10:43:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.530833 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.530878 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.530890 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.530907 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.530921 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:53Z","lastTransitionTime":"2026-01-26T10:43:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.632853 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.632899 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.632912 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.632929 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.632939 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:53Z","lastTransitionTime":"2026-01-26T10:43:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.735628 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.735684 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.735694 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.735709 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.735718 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:53Z","lastTransitionTime":"2026-01-26T10:43:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.838394 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.838446 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.838454 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.838471 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.838481 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:53Z","lastTransitionTime":"2026-01-26T10:43:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.940476 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.940528 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.940541 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.940560 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.940575 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:53Z","lastTransitionTime":"2026-01-26T10:43:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
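
Note: the NodeNotReady heartbeats repeating every ~100 ms above are the kubelet's status loop re-reporting the same CRI condition: NetworkReady=false because nothing has written a CNI config yet. A quick check of the directory the message names (a sketch to run on the node, not CRI-O's exact loader):

```python
from pathlib import Path

# Directory named in the NodeNotReady records above.
CNI_DIR = Path("/etc/kubernetes/cni/net.d")

# libcni-style loaders look for *.conf, *.conflist and *.json files here;
# an empty listing matches the "no CNI configuration file" complaint.
configs = []
if CNI_DIR.is_dir():
    configs = sorted(p for p in CNI_DIR.iterdir()
                     if p.suffix in {".conf", ".conflist", ".json"})

if configs:
    for p in configs:
        print("found CNI config:", p)
else:
    print(f"no CNI configuration file in {CNI_DIR}/ (NetworkReady stays False)")
```

ovnkube-node is still coming up (its ContainerStarted event appears at 10:43:54 below), and OVN-Kubernetes writes its config into this directory once running, which should eventually flip NetworkReady back to true.
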
Has your network provider started?"} Jan 26 10:43:53 crc kubenswrapper[5003]: I0126 10:43:53.979847 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 23:22:58.525915043 +0000 UTC Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.001266 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.001357 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.001336 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:43:54 crc kubenswrapper[5003]: E0126 10:43:54.001481 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:43:54 crc kubenswrapper[5003]: E0126 10:43:54.001798 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:43:54 crc kubenswrapper[5003]: E0126 10:43:54.001889 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.043368 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.043417 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.043429 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.043445 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.043457 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:54Z","lastTransitionTime":"2026-01-26T10:43:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
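
Note: the certificate_manager record above deserves a second look: the kubelet serving certificate is valid until 2026-02-24, yet the rotation deadline it reports (2025-12-02) is already in the past at the log's clock, so a rotation attempt should fire immediately. client-go places that deadline at a jittered fraction of the certificate lifetime; a sketch of the rule, treating the 70-90% band as an assumption and the issue time as hypothetical (the record only shows the expiry):

```python
import random
from datetime import datetime, timedelta

not_after = datetime(2026, 2, 24, 5, 53, 3)   # expiry from the record above
not_before = not_after - timedelta(days=90)   # hypothetical issue time

# Jittered deadline somewhere in the 70-90% band of the certificate
# lifetime (assumed approximation of client-go's certificate manager).
lifetime = not_after - not_before
deadline = not_before + lifetime * random.uniform(0.7, 0.9)
print("rotation deadline:", deadline)
```
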
Has your network provider started?"} Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.146189 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.146232 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.146248 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.146267 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.146305 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:54Z","lastTransitionTime":"2026-01-26T10:43:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.248716 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.248759 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.248774 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.248796 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.248810 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:54Z","lastTransitionTime":"2026-01-26T10:43:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.296169 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q24zl_f9a98683-f9ac-45d4-9312-43ebf25bdb52/ovnkube-controller/1.log" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.300455 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" event={"ID":"f9a98683-f9ac-45d4-9312-43ebf25bdb52","Type":"ContainerStarted","Data":"38a9363b7cc831a8e7ccde1ea6ed7a5022d00de2d620f6c504382ff8647293c6"} Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.301373 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.321870 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4
e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:54Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.341638 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCo
unt\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:54Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.352040 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.352090 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.352102 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.352123 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.352137 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:54Z","lastTransitionTime":"2026-01-26T10:43:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.358661 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef6c99048be2d0cfc98ef20b8559ceaf44491ffebbc5a8f363117a1c7a0b3c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:54Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.374873 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:54Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.389801 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eac8a62-4b57-4423-b3b6-5d62047182f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e29a3746f285efd57f60fafc32c5842c9b6509264e0c3385c5e1b5600231f047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98b5af77054fb64a506432cbe6801e93888383a6288532432b7217b7b76c5cea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s4f8s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:54Z is after 2025-08-24T17:21:41Z" Jan 26 
10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.406999 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4jrnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4jrnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:54Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.426803 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021
ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:54Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.439757 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:54Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.454457 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.454499 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.454508 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.454524 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.454533 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:54Z","lastTransitionTime":"2026-01-26T10:43:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.454892 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
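
Note: the lastState blocks in these network-diagnostics records (exitCode 137, reason ContainerStatusUnknown, "The container could not be located when the pod was deleted") are placeholders the kubelet synthesizes when a container that used to be running can no longer be found after a restart; the 137 is a convention here, not an observed OOM kill. To see at a glance which pods are stuck in this retry loop, a tally over a saved copy of the journal (file name hypothetical):

```python
import re
from collections import Counter

# Matches the pod="namespace/name" field of the failure records above.
PATTERN = re.compile(r'"Failed to update status for pod" pod="([^"]+)"')

counts = Counter()
with open("kubelet-journal.log", encoding="utf-8", errors="replace") as fh:
    for line in fh:
        for pod in PATTERN.findall(line):
            counts[pod] += 1

for pod, n in counts.most_common():
    print(f"{n:4d}  {pod}")
```
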
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:54Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.470477 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:54Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.484428 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:54Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.495023 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xfz4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81a673ac45c8bce5e09aa400e29966d2fa127d94716fbcbb4601193bd912e66d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j74r8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xfz4f\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:54Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.506529 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:54Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.519148 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:54Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.533444 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f74daac1b5987191a8aec42f28dc59180aaa2033b31d0c4341b0938e473fe2f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:54Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.546961 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:54Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.556926 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.557313 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.557402 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.557537 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.557655 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:54Z","lastTransitionTime":"2026-01-26T10:43:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.565607 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a9363b7cc831a8e7ccde1ea6ed7a5022d00de2
d620f6c504382ff8647293c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6118732ff82b2af89bcac73a5fee09e3c75a895312a5f4de750303a7b241c9eb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"message\\\":\\\"3:38.731813 6422 services_controller.go:360] Finished syncing service ingress-canary on namespace openshift-ingress-canary for network=default : 1.684578ms\\\\nI0126 10:43:38.731818 6422 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nF0126 10:43:38.731443 6422 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:38Z is after 2025-08-24T17:21:41Z]\\\\nI0126 10:43:38.731831 6422 services_controller.go:356] Processing sync for service openshift-ovn-kubernetes/ovn-kubernetes-control-plane for network=default\\\\nI0126 10:43:38.731842 6422 
services_co\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:54Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.660400 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.660707 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.660788 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.660897 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.660961 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:54Z","lastTransitionTime":"2026-01-26T10:43:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.764186 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.764234 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.764245 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.764262 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.764274 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:54Z","lastTransitionTime":"2026-01-26T10:43:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.866999 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.867049 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.867063 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.867084 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.867099 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:54Z","lastTransitionTime":"2026-01-26T10:43:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.969260 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.969347 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.969366 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.969390 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.969408 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:54Z","lastTransitionTime":"2026-01-26T10:43:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:54 crc kubenswrapper[5003]: I0126 10:43:54.980610 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 13:55:55.395460171 +0000 UTC Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.001276 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:43:55 crc kubenswrapper[5003]: E0126 10:43:55.001523 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.018707 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.036523 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.052753 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.069491 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.072849 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.073062 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.073143 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.073422 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.073508 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:55Z","lastTransitionTime":"2026-01-26T10:43:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.084507 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xfz4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81a673ac45c8bce5e09aa400e29966d2fa127d94716fbcbb4601193bd912e66d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j74r8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xfz4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.103776 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021
ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.114476 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\
\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.125047 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gm
zb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.140644 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/aa06185d-fe5e-423a-b5a7-19e8bb7c8a60-metrics-certs\") pod \"network-metrics-daemon-4jrnq\" (UID: \"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60\") " pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:43:55 crc kubenswrapper[5003]: E0126 10:43:55.140916 5003 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 10:43:55 crc kubenswrapper[5003]: E0126 10:43:55.141033 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/aa06185d-fe5e-423a-b5a7-19e8bb7c8a60-metrics-certs podName:aa06185d-fe5e-423a-b5a7-19e8bb7c8a60 nodeName:}" failed. No retries permitted until 2026-01-26 10:44:11.14101484 +0000 UTC m=+66.682240401 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/aa06185d-fe5e-423a-b5a7-19e8bb7c8a60-metrics-certs") pod "network-metrics-daemon-4jrnq" (UID: "aa06185d-fe5e-423a-b5a7-19e8bb7c8a60") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.143384 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f74daac1b5987191a8aec42f28dc59180aaa2033b31d0c4341b0938e473fe2f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.155701 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.171315 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33840
af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a9363b7cc831a8e7ccde1ea6ed7a5022d00de2d620f6c504382ff8647293c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6118732ff82b2af89bcac73a5fee09e3c75a895312a5f4de750303a7b241c9eb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"message\\\":\\\"3:38.731813 6422 services_controller.go:360] Finished syncing service ingress-canary on namespace openshift-ingress-canary for network=default : 1.684578ms\\\\nI0126 10:43:38.731818 6422 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nF0126 10:43:38.731443 6422 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:38Z is after 2025-08-24T17:21:41Z]\\\\nI0126 10:43:38.731831 6422 services_controller.go:356] Processing sync for service openshift-ovn-kubernetes/ovn-kubernetes-control-plane for network=default\\\\nI0126 10:43:38.731842 6422 
services_co\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.175950 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.175984 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.175995 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.176009 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.176020 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:55Z","lastTransitionTime":"2026-01-26T10:43:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.185363 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.197126 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef6c99048be2d0cfc98ef20b8559ceaf44491ffebbc5a8f363117a1c7a0b3c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.207582 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.215959 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eac8a62-4b57-4423-b3b6-5d62047182f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e29a3746f285efd57f60fafc32c5842c9b6509264e0c3385c5e1b5600231f047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98b5af77054fb64a506432cbe6801e93888383a6288532432b7217b7b76c5cea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s4f8s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 
10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.225466 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4jrnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4jrnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.236246 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.279760 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.279802 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.279812 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.279828 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.279839 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:55Z","lastTransitionTime":"2026-01-26T10:43:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.304463 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q24zl_f9a98683-f9ac-45d4-9312-43ebf25bdb52/ovnkube-controller/2.log" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.305236 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q24zl_f9a98683-f9ac-45d4-9312-43ebf25bdb52/ovnkube-controller/1.log" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.307800 5003 generic.go:334] "Generic (PLEG): container finished" podID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerID="38a9363b7cc831a8e7ccde1ea6ed7a5022d00de2d620f6c504382ff8647293c6" exitCode=1 Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.307836 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" event={"ID":"f9a98683-f9ac-45d4-9312-43ebf25bdb52","Type":"ContainerDied","Data":"38a9363b7cc831a8e7ccde1ea6ed7a5022d00de2d620f6c504382ff8647293c6"} Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.307872 5003 scope.go:117] "RemoveContainer" containerID="6118732ff82b2af89bcac73a5fee09e3c75a895312a5f4de750303a7b241c9eb" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.308501 5003 scope.go:117] "RemoveContainer" containerID="38a9363b7cc831a8e7ccde1ea6ed7a5022d00de2d620f6c504382ff8647293c6" Jan 26 10:43:55 crc kubenswrapper[5003]: E0126 10:43:55.308664 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-q24zl_openshift-ovn-kubernetes(f9a98683-f9ac-45d4-9312-43ebf25bdb52)\"" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.330463 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a9363b7cc831a8e7ccde1ea6ed7a5022d00de2
d620f6c504382ff8647293c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6118732ff82b2af89bcac73a5fee09e3c75a895312a5f4de750303a7b241c9eb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:43:38Z\\\",\\\"message\\\":\\\"3:38.731813 6422 services_controller.go:360] Finished syncing service ingress-canary on namespace openshift-ingress-canary for network=default : 1.684578ms\\\\nI0126 10:43:38.731818 6422 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nF0126 10:43:38.731443 6422 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:38Z is after 2025-08-24T17:21:41Z]\\\\nI0126 10:43:38.731831 6422 services_controller.go:356] Processing sync for service openshift-ovn-kubernetes/ovn-kubernetes-control-plane for network=default\\\\nI0126 10:43:38.731842 6422 services_co\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38a9363b7cc831a8e7ccde1ea6ed7a5022d00de2d620f6c504382ff8647293c6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:43:54Z\\\",\\\"message\\\":\\\"netes/ovnkube-control-plane-749d76644c-s4f8s\\\\nI0126 10:43:54.165445 6637 lb_config.go:1031] Cluster endpoints for openshift-machine-api/cluster-autoscaler-operator for network=default are: map[]\\\\nI0126 10:43:54.165459 6637 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s in node crc\\\\nI0126 10:43:54.165473 6637 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s after 0 failed attempt(s)\\\\nI0126 10:43:54.165485 6637 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s\\\\nI0126 10:43:54.165486 6637 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nI0126 10:43:54.165398 6637 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0126 10:43:54.165472 6637 services_controller.go:443] Built service openshift-machine-api/cluster-autoscaler-operator LB cluster-wide configs for network=default: 
[]services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.245\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, exte\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\
\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.345815 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.358931 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.373929 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f74daac1b5987191a8aec42f28dc59180aaa2033b31d0c4341b0938e473fe2f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.381868 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.381898 5003 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.381909 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.381924 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.381935 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:55Z","lastTransitionTime":"2026-01-26T10:43:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.387615 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/k
ubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.400494 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c9871
17ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962c
cad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.413054 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\
\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.423834 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef6c99048be2d0cfc98ef20b8559ceaf44491ffebbc5a8f363117a1c7a0b3c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.437433 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.450205 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eac8a62-4b57-4423-b3b6-5d62047182f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e29a3746f285efd57f60fafc32c5842c9b6509264e0c3385c5e1b5600231f047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98b5af77054fb64a506432cbe6801e93888383a6288532432b7217b7b76c5cea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s4f8s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 
10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.460236 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4jrnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4jrnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.480103 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021
ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.483885 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.483918 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.483931 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.483947 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.483961 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:55Z","lastTransitionTime":"2026-01-26T10:43:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.492828 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.505083 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.516056 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.528871 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.538005 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xfz4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81a673ac45c8bce5e09aa400e29966d2fa127d94716fbcbb4601193bd912e66d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j74r8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xfz4f\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:55Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.586780 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.587024 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.587107 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.587179 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.587240 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:55Z","lastTransitionTime":"2026-01-26T10:43:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.689553 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.689594 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.689614 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.689641 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.689658 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:55Z","lastTransitionTime":"2026-01-26T10:43:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.792093 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.792789 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.792920 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.793041 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.793121 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:55Z","lastTransitionTime":"2026-01-26T10:43:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.895579 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.895610 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.895619 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.895633 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.895644 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:55Z","lastTransitionTime":"2026-01-26T10:43:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.981655 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 10:54:52.694527099 +0000 UTC Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.999151 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.999588 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.999662 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.999812 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:55 crc kubenswrapper[5003]: I0126 10:43:55.999911 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:55Z","lastTransitionTime":"2026-01-26T10:43:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.001376 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:43:56 crc kubenswrapper[5003]: E0126 10:43:56.001527 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.001537 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.001398 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:43:56 crc kubenswrapper[5003]: E0126 10:43:56.001775 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:43:56 crc kubenswrapper[5003]: E0126 10:43:56.001654 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.050101 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.050192 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:43:56 crc kubenswrapper[5003]: E0126 10:43:56.050270 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:44:28.05023918 +0000 UTC m=+83.591464751 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:43:56 crc kubenswrapper[5003]: E0126 10:43:56.050336 5003 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 10:43:56 crc kubenswrapper[5003]: E0126 10:43:56.050389 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 10:44:28.050376364 +0000 UTC m=+83.591601925 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.050472 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:43:56 crc kubenswrapper[5003]: E0126 10:43:56.050586 5003 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 10:43:56 crc kubenswrapper[5003]: E0126 10:43:56.050630 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 10:44:28.050622221 +0000 UTC m=+83.591847782 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.103345 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.103548 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.103673 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.103759 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.103821 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:56Z","lastTransitionTime":"2026-01-26T10:43:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.151826 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:43:56 crc kubenswrapper[5003]: E0126 10:43:56.152115 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 10:43:56 crc kubenswrapper[5003]: E0126 10:43:56.152172 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 10:43:56 crc kubenswrapper[5003]: E0126 10:43:56.152197 5003 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 10:43:56 crc kubenswrapper[5003]: E0126 10:43:56.152298 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-26 10:44:28.152250764 +0000 UTC m=+83.693476315 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.152428 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:43:56 crc kubenswrapper[5003]: E0126 10:43:56.152664 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 10:43:56 crc kubenswrapper[5003]: E0126 10:43:56.152747 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 10:43:56 crc kubenswrapper[5003]: E0126 10:43:56.152825 5003 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 10:43:56 crc kubenswrapper[5003]: E0126 10:43:56.152974 5003 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-26 10:44:28.152952994 +0000 UTC m=+83.694178755 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.206689 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.206743 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.206760 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.206820 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.206836 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:56Z","lastTransitionTime":"2026-01-26T10:43:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.309372 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.309421 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.309435 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.309457 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.309469 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:56Z","lastTransitionTime":"2026-01-26T10:43:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.312229 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q24zl_f9a98683-f9ac-45d4-9312-43ebf25bdb52/ovnkube-controller/2.log" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.315460 5003 scope.go:117] "RemoveContainer" containerID="38a9363b7cc831a8e7ccde1ea6ed7a5022d00de2d620f6c504382ff8647293c6" Jan 26 10:43:56 crc kubenswrapper[5003]: E0126 10:43:56.315642 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-q24zl_openshift-ovn-kubernetes(f9a98683-f9ac-45d4-9312-43ebf25bdb52)\"" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.333108 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:56Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.354153 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:56Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.363319 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xfz4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81a673ac45c8bce5e09aa400e29966d2fa127d94716fbcbb4601193bd912e66d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j74r8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xfz4f\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:56Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.392940 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:56Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.409416 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:56Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.412251 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.412326 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.412340 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.412359 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.412372 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:56Z","lastTransitionTime":"2026-01-26T10:43:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.432079 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:56Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.448127 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:56Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.465689 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f74daac1b5987191a8aec42f28dc59180aaa2033b31d0c4341b0938e473fe2f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:56Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.480503 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:56Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.499519 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33840
af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a9363b7cc831a8e7ccde1ea6ed7a5022d00de2d620f6c504382ff8647293c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38a9363b7cc831a8e7ccde1ea6ed7a5022d00de2d620f6c504382ff8647293c6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:43:54Z\\\",\\\"message\\\":\\\"netes/ovnkube-control-plane-749d76644c-s4f8s\\\\nI0126 10:43:54.165445 6637 lb_config.go:1031] Cluster endpoints for openshift-machine-api/cluster-autoscaler-operator for network=default are: map[]\\\\nI0126 10:43:54.165459 6637 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s in node crc\\\\nI0126 10:43:54.165473 6637 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s after 0 failed attempt(s)\\\\nI0126 10:43:54.165485 6637 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s\\\\nI0126 10:43:54.165486 6637 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nI0126 10:43:54.165398 6637 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0126 10:43:54.165472 6637 services_controller.go:443] Built service openshift-machine-api/cluster-autoscaler-operator LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.245\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, 
exte\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-q24zl_openshift-ovn-kubernetes(f9a98683-f9ac-45d4-9312-43ebf25bdb52)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiv
eReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:56Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.508250 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:56Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.514460 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.514496 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.514508 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.514524 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.514536 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:56Z","lastTransitionTime":"2026-01-26T10:43:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.519176 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name
\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:56Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.529247 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eac8a62-4b57-4423-b3b6-5d62047182f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e29a3746f285efd57f60fafc32c5842c9b6509264e0c3385c5e1b5600231f047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98b5af77054fb64a506432cbe6801e93888383a6288532432b7217b7b76c5cea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s4f8s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:56Z is after 2025-08-24T17:21:41Z" Jan 26 
10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.536857 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4jrnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4jrnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:56Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.546092 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:56Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.556523 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef6c99048be2d0cfc98ef20b8559ceaf44491ffebbc5a8f363117a1c7a0b3c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-26T10:43:56Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.567509 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:56Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.618766 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.619473 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.619526 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.619549 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.619562 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:56Z","lastTransitionTime":"2026-01-26T10:43:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.722723 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.722785 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.722808 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.722835 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.722860 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:56Z","lastTransitionTime":"2026-01-26T10:43:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.825093 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.825230 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.825244 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.825262 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.825274 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:56Z","lastTransitionTime":"2026-01-26T10:43:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.928094 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.928137 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.928149 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.928164 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.928174 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:56Z","lastTransitionTime":"2026-01-26T10:43:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:56 crc kubenswrapper[5003]: I0126 10:43:56.982530 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 09:19:26.526946939 +0000 UTC Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.001023 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:43:57 crc kubenswrapper[5003]: E0126 10:43:57.001179 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.030353 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.030385 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.030400 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.030418 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.030429 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:57Z","lastTransitionTime":"2026-01-26T10:43:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.133465 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.133519 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.133533 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.133604 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.133620 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:57Z","lastTransitionTime":"2026-01-26T10:43:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.236084 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.236115 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.236126 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.236143 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.236152 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:57Z","lastTransitionTime":"2026-01-26T10:43:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.338872 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.338909 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.338919 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.338938 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.338957 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:57Z","lastTransitionTime":"2026-01-26T10:43:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.442576 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.442621 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.442632 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.442650 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.442660 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:57Z","lastTransitionTime":"2026-01-26T10:43:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.546245 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.546391 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.546417 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.546445 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.546466 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:57Z","lastTransitionTime":"2026-01-26T10:43:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.649626 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.649698 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.649738 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.649774 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.649798 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:57Z","lastTransitionTime":"2026-01-26T10:43:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.747751 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.752578 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.752640 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.752666 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.752697 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.752723 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:57Z","lastTransitionTime":"2026-01-26T10:43:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.762192 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.766124 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:57Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.786408 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef6c99048be2d0cfc98ef20b8559ceaf44491ffebbc5a8f363117a1c7a0b3c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-26T10:43:57Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.804319 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:57Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.815203 5003 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eac8a62-4b57-4423-b3b6-5d62047182f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e29a3746f285efd57f60fafc32c5842c9b6509264e0c3385c5e1b5600231f047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98b5af77054fb64a506432cbe6801e93888383a6288532432b7217b7b76c5cea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s4f8s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-26T10:43:57Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.824654 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4jrnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4jrnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:57Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.834630 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xfz4f" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81a673ac45c8bce5e09aa400e29966d2fa127d94716fbcbb4601193bd912e66d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j74r8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xfz4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:57Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.851151 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021
ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:57Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.855411 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.855487 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.855522 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.855548 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.855568 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:57Z","lastTransitionTime":"2026-01-26T10:43:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.863427 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:57Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.876261 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:57Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.887731 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:57Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.901946 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:57Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.916858 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:57Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.935668 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a9363b7cc831a8e7ccde1ea6ed7a5022d00de2
d620f6c504382ff8647293c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38a9363b7cc831a8e7ccde1ea6ed7a5022d00de2d620f6c504382ff8647293c6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:43:54Z\\\",\\\"message\\\":\\\"netes/ovnkube-control-plane-749d76644c-s4f8s\\\\nI0126 10:43:54.165445 6637 lb_config.go:1031] Cluster endpoints for openshift-machine-api/cluster-autoscaler-operator for network=default are: map[]\\\\nI0126 10:43:54.165459 6637 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s in node crc\\\\nI0126 10:43:54.165473 6637 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s after 0 failed attempt(s)\\\\nI0126 10:43:54.165485 6637 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s\\\\nI0126 10:43:54.165486 6637 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nI0126 10:43:54.165398 6637 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0126 10:43:54.165472 6637 services_controller.go:443] Built service openshift-machine-api/cluster-autoscaler-operator LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.245\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, exte\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-q24zl_openshift-ovn-kubernetes(f9a98683-f9ac-45d4-9312-43ebf25bdb52)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:57Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.947140 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:57Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.957678 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.957711 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.957721 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.957738 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.957750 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:57Z","lastTransitionTime":"2026-01-26T10:43:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.961305 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:57Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.980446 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f74daac1b5987191a8aec42f28dc59180aaa2033b31d0c4341b0938e473fe2f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:57Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:57 crc kubenswrapper[5003]: I0126 10:43:57.982711 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 14:01:00.70633494 +0000 UTC Jan 26 10:43:57 crc 
kubenswrapper[5003]: I0126 10:43:57.995640 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\
\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:57Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.000804 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.000917 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:43:58 crc kubenswrapper[5003]: E0126 10:43:58.000965 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:43:58 crc kubenswrapper[5003]: E0126 10:43:58.001057 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.001114 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:43:58 crc kubenswrapper[5003]: E0126 10:43:58.001177 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.060815 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.060843 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.060855 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.060869 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.060879 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:58Z","lastTransitionTime":"2026-01-26T10:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.163413 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.163457 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.163469 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.163486 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.163498 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:58Z","lastTransitionTime":"2026-01-26T10:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.266389 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.266430 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.266440 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.266454 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.266463 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:58Z","lastTransitionTime":"2026-01-26T10:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.368852 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.368945 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.369165 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.369208 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.369221 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:58Z","lastTransitionTime":"2026-01-26T10:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.471918 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.472173 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.472261 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.472391 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.472513 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:58Z","lastTransitionTime":"2026-01-26T10:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.574969 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.575246 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.575337 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.575426 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.575504 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:58Z","lastTransitionTime":"2026-01-26T10:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.679177 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.679418 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.679483 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.679572 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.679631 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:58Z","lastTransitionTime":"2026-01-26T10:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.781465 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.781739 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.781822 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.781931 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.782023 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:58Z","lastTransitionTime":"2026-01-26T10:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.838119 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.838760 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.838873 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.838963 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.839093 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:58Z","lastTransitionTime":"2026-01-26T10:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:43:58 crc kubenswrapper[5003]: E0126 10:43:58.850599 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:58Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.853652 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.853741 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.853826 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.853926 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.853991 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:58Z","lastTransitionTime":"2026-01-26T10:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:58 crc kubenswrapper[5003]: E0126 10:43:58.864570 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:58Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.867486 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.867514 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.867534 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.867546 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.867556 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:58Z","lastTransitionTime":"2026-01-26T10:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:58 crc kubenswrapper[5003]: E0126 10:43:58.877777 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:58Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.880902 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.880940 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.880953 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.880982 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.880997 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:58Z","lastTransitionTime":"2026-01-26T10:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:58 crc kubenswrapper[5003]: E0126 10:43:58.893813 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:58Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.916995 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.917117 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.917238 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.917377 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.917652 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:58Z","lastTransitionTime":"2026-01-26T10:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:58 crc kubenswrapper[5003]: E0126 10:43:58.932164 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:43:58Z is after 2025-08-24T17:21:41Z" Jan 26 10:43:58 crc kubenswrapper[5003]: E0126 10:43:58.932629 5003 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.934222 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.934370 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.934490 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.934612 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.934737 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:58Z","lastTransitionTime":"2026-01-26T10:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:43:58 crc kubenswrapper[5003]: I0126 10:43:58.983267 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 00:16:57.167742855 +0000 UTC Jan 26 10:43:59 crc kubenswrapper[5003]: I0126 10:43:59.001028 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:43:59 crc kubenswrapper[5003]: E0126 10:43:59.001508 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:43:59 crc kubenswrapper[5003]: I0126 10:43:59.037082 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:43:59 crc kubenswrapper[5003]: I0126 10:43:59.037132 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:43:59 crc kubenswrapper[5003]: I0126 10:43:59.037145 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:43:59 crc kubenswrapper[5003]: I0126 10:43:59.037200 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:43:59 crc kubenswrapper[5003]: I0126 10:43:59.037211 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:43:59Z","lastTransitionTime":"2026-01-26T10:43:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
[Identical "Recording event message for node" (NodeHasSufficientMemory, NodeHasNoDiskPressure, NodeHasSufficientPID, NodeNotReady) and "Node became not ready" cycles, repeating roughly every 100ms from 10:43:59.139978 through 10:43:59.960807, elided]
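[Diagnostic note] The NotReady condition repeated above is independent of the webhook failure: the container runtime reports no CNI configuration under /etc/kubernetes/cni/net.d/, which persists until the cluster network provider writes one. A small Go sketch of the check the message describes, listing network configs in that directory (the .conf/.conflist/.json extension filter follows the usual libcni convention and is an assumption, not taken from this log):

// cnicheck.go - sketch: list CNI network configs in the directory the
// kubelet log complains about; an empty result corresponds to the
// "no CNI configuration file" condition.
package main

import (
	"fmt"
	"log"
	"os"
	"path/filepath"
)

func main() {
	const dir = "/etc/kubernetes/cni/net.d" // path taken from the log
	entries, err := os.ReadDir(dir)
	if err != nil {
		log.Fatalf("read %s: %v", dir, err)
	}
	found := 0
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			fmt.Println("config:", filepath.Join(dir, e.Name()))
			found++
		}
	}
	if found == 0 {
		fmt.Println("no CNI configuration file found - network plugin not ready")
	}
}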
Jan 26 10:43:59 crc kubenswrapper[5003]: I0126 10:43:59.984374 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 19:00:22.980608572 +0000 UTC Jan 26 10:44:00 crc kubenswrapper[5003]: I0126 10:44:00.001227 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:44:00 crc kubenswrapper[5003]: I0126 10:44:00.001267 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:44:00 crc kubenswrapper[5003]: I0126 10:44:00.001315 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:44:00 crc kubenswrapper[5003]: E0126 10:44:00.001475 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:44:00 crc kubenswrapper[5003]: E0126 10:44:00.001550 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:44:00 crc kubenswrapper[5003]: E0126 10:44:00.001633 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:44:00 crc kubenswrapper[5003]: I0126 10:44:00.063324 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:00 crc kubenswrapper[5003]: I0126 10:44:00.063399 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:00 crc kubenswrapper[5003]: I0126 10:44:00.063416 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:00 crc kubenswrapper[5003]: I0126 10:44:00.063440 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:00 crc kubenswrapper[5003]: I0126 10:44:00.063458 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:00Z","lastTransitionTime":"2026-01-26T10:44:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
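[Diagnostic note] The kubernetes.io/kubelet-serving certificate_manager entries above report rotation deadlines (2025-12-09 and 2025-11-07) months before the 2026-02-24 expiry, and already in the past, so the kubelet will attempt rotation as soon as it can: the deadline is drawn at a jittered fraction of the certificate's validity window and re-rolled on each pass, which is why the two entries differ. A rough Go sketch of that computation (the 70-90% band approximates client-go's certificate manager rather than reproducing it exactly, and the one-year notBefore is assumed, since the log only shows the expiration):

// rotation.go - sketch: jittered rotation deadline at roughly 70-90% of a
// certificate's validity window (approximates the behavior of client-go's
// certificate manager; the exact jitter there differs slightly).
package main

import (
	"fmt"
	"math/rand"
	"time"
)

func nextRotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	// Pick a random point in [0.7, 0.9) of the certificate lifetime.
	frac := 0.7 + 0.2*rand.Float64()
	return notBefore.Add(time.Duration(frac * float64(total)))
}

func main() {
	// Expiration taken from the log; notBefore is an assumed one-year validity.
	notAfter := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC)
	notBefore := notAfter.AddDate(-1, 0, 0)
	fmt.Println("rotation deadline:", nextRotationDeadline(notBefore, notAfter))
}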
[Identical event and "Node became not ready" cycles, repeating roughly every 100ms from 10:44:00.166265 through 10:44:00.892009, elided] Jan 26 10:44:00 crc kubenswrapper[5003]: I0126 10:44:00.892020 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:00Z","lastTransitionTime":"2026-01-26T10:44:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:00 crc kubenswrapper[5003]: I0126 10:44:00.985138 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 06:35:18.52457952 +0000 UTC Jan 26 10:44:00 crc kubenswrapper[5003]: I0126 10:44:00.994915 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:00 crc kubenswrapper[5003]: I0126 10:44:00.994974 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:00 crc kubenswrapper[5003]: I0126 10:44:00.995000 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:00 crc kubenswrapper[5003]: I0126 10:44:00.995026 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:00 crc kubenswrapper[5003]: I0126 10:44:00.995045 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:00Z","lastTransitionTime":"2026-01-26T10:44:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.001748 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:44:01 crc kubenswrapper[5003]: E0126 10:44:01.001936 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.097957 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.097994 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.098003 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.098018 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.098027 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:01Z","lastTransitionTime":"2026-01-26T10:44:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.200362 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.200432 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.200457 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.200486 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.200509 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:01Z","lastTransitionTime":"2026-01-26T10:44:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.303506 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.303553 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.303563 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.303577 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.303587 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:01Z","lastTransitionTime":"2026-01-26T10:44:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.406378 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.406446 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.406467 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.406496 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.406516 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:01Z","lastTransitionTime":"2026-01-26T10:44:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.508123 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.508176 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.508187 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.508204 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.508225 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:01Z","lastTransitionTime":"2026-01-26T10:44:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.610607 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.610657 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.610669 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.610687 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.610699 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:01Z","lastTransitionTime":"2026-01-26T10:44:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.714096 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.714130 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.714139 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.714154 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.714165 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:01Z","lastTransitionTime":"2026-01-26T10:44:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.816511 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.816554 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.816566 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.816585 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.816598 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:01Z","lastTransitionTime":"2026-01-26T10:44:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.918468 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.918530 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.918546 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.918576 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.918588 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:01Z","lastTransitionTime":"2026-01-26T10:44:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:01 crc kubenswrapper[5003]: I0126 10:44:01.985520 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 07:41:06.991209159 +0000 UTC Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.001088 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.001151 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:44:02 crc kubenswrapper[5003]: E0126 10:44:02.001194 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.001269 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:44:02 crc kubenswrapper[5003]: E0126 10:44:02.001362 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:44:02 crc kubenswrapper[5003]: E0126 10:44:02.001520 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.021220 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.021274 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.021329 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.021348 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.021362 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:02Z","lastTransitionTime":"2026-01-26T10:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.125258 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.125335 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.125349 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.125366 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.125382 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:02Z","lastTransitionTime":"2026-01-26T10:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.234875 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.234933 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.234944 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.234960 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.234970 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:02Z","lastTransitionTime":"2026-01-26T10:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.338355 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.338406 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.338427 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.338456 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.338478 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:02Z","lastTransitionTime":"2026-01-26T10:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.441025 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.441085 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.441103 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.441127 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.441145 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:02Z","lastTransitionTime":"2026-01-26T10:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.544606 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.544675 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.544692 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.544717 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.544734 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:02Z","lastTransitionTime":"2026-01-26T10:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.648146 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.648218 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.648253 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.648333 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.648357 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:02Z","lastTransitionTime":"2026-01-26T10:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.750903 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.750975 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.750994 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.751016 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.751032 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:02Z","lastTransitionTime":"2026-01-26T10:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.853092 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.853142 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.853152 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.853167 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.853177 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:02Z","lastTransitionTime":"2026-01-26T10:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.955574 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.955618 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.955631 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.955647 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.955659 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:02Z","lastTransitionTime":"2026-01-26T10:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:02 crc kubenswrapper[5003]: I0126 10:44:02.986075 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 19:29:40.457200329 +0000 UTC Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.001123 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:44:03 crc kubenswrapper[5003]: E0126 10:44:03.001312 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.059458 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.059520 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.059535 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.059557 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.059575 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:03Z","lastTransitionTime":"2026-01-26T10:44:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.161522 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.161581 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.161597 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.161618 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.161632 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:03Z","lastTransitionTime":"2026-01-26T10:44:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.264153 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.264200 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.264211 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.264226 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.264237 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:03Z","lastTransitionTime":"2026-01-26T10:44:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.366209 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.366246 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.366254 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.366268 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.366301 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:03Z","lastTransitionTime":"2026-01-26T10:44:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.469135 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.469213 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.469236 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.469260 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.469306 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:03Z","lastTransitionTime":"2026-01-26T10:44:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.571856 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.571928 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.571951 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.571978 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.571997 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:03Z","lastTransitionTime":"2026-01-26T10:44:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.675745 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.675802 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.675821 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.675846 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.675865 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:03Z","lastTransitionTime":"2026-01-26T10:44:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.781604 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.781666 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.781679 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.781709 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.781724 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:03Z","lastTransitionTime":"2026-01-26T10:44:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.883909 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.883973 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.883984 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.884005 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.884023 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:03Z","lastTransitionTime":"2026-01-26T10:44:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.986345 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 01:52:02.376297906 +0000 UTC Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.986836 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.986885 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.986910 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.986940 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:03 crc kubenswrapper[5003]: I0126 10:44:03.986961 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:03Z","lastTransitionTime":"2026-01-26T10:44:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.001275 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.001422 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:44:04 crc kubenswrapper[5003]: E0126 10:44:04.001477 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.001492 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:44:04 crc kubenswrapper[5003]: E0126 10:44:04.001699 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:44:04 crc kubenswrapper[5003]: E0126 10:44:04.001727 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.089021 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.089058 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.089067 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.089080 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.089094 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:04Z","lastTransitionTime":"2026-01-26T10:44:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.191466 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.191505 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.191517 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.191535 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.191547 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:04Z","lastTransitionTime":"2026-01-26T10:44:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.294623 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.294659 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.294668 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.294683 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.294692 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:04Z","lastTransitionTime":"2026-01-26T10:44:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.398106 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.398158 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.398167 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.398187 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.398199 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:04Z","lastTransitionTime":"2026-01-26T10:44:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.500555 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.500596 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.500608 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.500620 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.500628 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:04Z","lastTransitionTime":"2026-01-26T10:44:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.603779 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.603835 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.603845 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.603863 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.603873 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:04Z","lastTransitionTime":"2026-01-26T10:44:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.706273 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.706378 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.706395 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.706417 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.706429 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:04Z","lastTransitionTime":"2026-01-26T10:44:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.809133 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.809199 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.809215 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.809242 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.809259 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:04Z","lastTransitionTime":"2026-01-26T10:44:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.911961 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.912025 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.912047 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.912076 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.912100 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:04Z","lastTransitionTime":"2026-01-26T10:44:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:04 crc kubenswrapper[5003]: I0126 10:44:04.986804 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 09:19:39.918041435 +0000 UTC Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.000784 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:44:05 crc kubenswrapper[5003]: E0126 10:44:05.001015 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.015651 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.015813 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:05Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.016020 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.016213 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.016247 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.016267 5003 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:05Z","lastTransitionTime":"2026-01-26T10:44:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.031058 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:05Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.048392 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f74daac1b5987191a8aec42f28dc59180aaa2033b31d0c4341b0938e473fe2f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/b
in\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverri
de-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:05Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.063610 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\
\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:05Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.081185 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a9363b7cc831a8e7ccde1ea6ed7a5022d00de2
d620f6c504382ff8647293c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38a9363b7cc831a8e7ccde1ea6ed7a5022d00de2d620f6c504382ff8647293c6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:43:54Z\\\",\\\"message\\\":\\\"netes/ovnkube-control-plane-749d76644c-s4f8s\\\\nI0126 10:43:54.165445 6637 lb_config.go:1031] Cluster endpoints for openshift-machine-api/cluster-autoscaler-operator for network=default are: map[]\\\\nI0126 10:43:54.165459 6637 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s in node crc\\\\nI0126 10:43:54.165473 6637 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s after 0 failed attempt(s)\\\\nI0126 10:43:54.165485 6637 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s\\\\nI0126 10:43:54.165486 6637 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nI0126 10:43:54.165398 6637 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0126 10:43:54.165472 6637 services_controller.go:443] Built service openshift-machine-api/cluster-autoscaler-operator LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.245\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, exte\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-q24zl_openshift-ovn-kubernetes(f9a98683-f9ac-45d4-9312-43ebf25bdb52)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:05Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.093399 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2977ac6-0892-467f-b265-16c14d9c63a7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d73b095e2001015587e06ebb41c1460ef946ddc2dd47f2c893ee82fe6353370b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://485f8dd99739834234edd418f9fc2fa8a50c854bf08a088fafb8ac6814dca1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2a532ad0f92d0c2284fcd0c52cac0b5187dacaccccf2fc361fab4173436c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c0f56e5d83ce7fcbde9c0503292ed1f242b2c1f535b471974ca3662ba6933e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c0f56e5d83ce7fcbde9c0503292ed1f242b2c1f535b471974ca3662ba6933e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:05Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.108516 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:05Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.119096 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.119149 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.119166 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.119185 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 
10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.119199 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:05Z","lastTransitionTime":"2026-01-26T10:44:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.122851 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:05Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.135228 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eac8a62-4b57-4423-b3b6-5d62047182f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e29a3746f285efd57f60fafc32c5842c9b6509264e0c3385c5e1b5600231f047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98b5af77054fb64a506432cbe6801e93888383a6288532432b7217b7b76c5cea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s4f8s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:05Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.146675 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4jrnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4jrnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:05Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.159112 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026
-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:05Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.171348 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef6c99048be2d0cfc98ef20b8559ceaf44491ffebbc5a8f363117a1c7a0b3c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:05Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.183209 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:05Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.196666 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:05Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.209560 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:05Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.222438 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.222482 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.222496 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.222512 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.222523 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:05Z","lastTransitionTime":"2026-01-26T10:44:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.222549 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xfz4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81a673ac45c8bce5e09aa400e29966d2fa127d94716fbcbb4601193bd912e66d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j74r8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xfz4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:05Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.244361 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021
ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:05Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.256417 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:05Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.325982 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.326039 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.326048 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.326063 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.326087 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:05Z","lastTransitionTime":"2026-01-26T10:44:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.428109 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.428186 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.428211 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.428240 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.428263 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:05Z","lastTransitionTime":"2026-01-26T10:44:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.531582 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.531651 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.531673 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.531701 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.531721 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:05Z","lastTransitionTime":"2026-01-26T10:44:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.633465 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.633500 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.633509 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.633523 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.633532 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:05Z","lastTransitionTime":"2026-01-26T10:44:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.735121 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.735425 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.735434 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.735448 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.735458 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:05Z","lastTransitionTime":"2026-01-26T10:44:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.837303 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.837329 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.837337 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.837349 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.837357 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:05Z","lastTransitionTime":"2026-01-26T10:44:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.939649 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.939711 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.939727 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.939749 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.939768 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:05Z","lastTransitionTime":"2026-01-26T10:44:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:05 crc kubenswrapper[5003]: I0126 10:44:05.986939 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 02:59:03.440120857 +0000 UTC Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.001565 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.001569 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:44:06 crc kubenswrapper[5003]: E0126 10:44:06.001784 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.001591 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:44:06 crc kubenswrapper[5003]: E0126 10:44:06.001862 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:44:06 crc kubenswrapper[5003]: E0126 10:44:06.001951 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.042652 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.042712 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.042730 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.042753 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.042769 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:06Z","lastTransitionTime":"2026-01-26T10:44:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.145543 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.145597 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.145614 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.145638 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.145657 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:06Z","lastTransitionTime":"2026-01-26T10:44:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.248446 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.248501 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.248517 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.248542 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.248560 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:06Z","lastTransitionTime":"2026-01-26T10:44:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.350354 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.350403 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.350419 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.350438 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.350457 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:06Z","lastTransitionTime":"2026-01-26T10:44:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.453202 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.453252 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.453266 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.453299 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.453313 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:06Z","lastTransitionTime":"2026-01-26T10:44:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.555956 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.556017 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.556040 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.556066 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.556087 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:06Z","lastTransitionTime":"2026-01-26T10:44:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.658506 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.658581 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.658604 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.658635 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.658659 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:06Z","lastTransitionTime":"2026-01-26T10:44:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.760804 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.760868 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.760886 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.760909 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.760927 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:06Z","lastTransitionTime":"2026-01-26T10:44:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.863032 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.863082 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.863094 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.863110 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.863125 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:06Z","lastTransitionTime":"2026-01-26T10:44:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.965650 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.965689 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.965699 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.965716 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.965726 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:06Z","lastTransitionTime":"2026-01-26T10:44:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:06 crc kubenswrapper[5003]: I0126 10:44:06.987349 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 10:42:59.411681335 +0000 UTC Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.000778 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:44:07 crc kubenswrapper[5003]: E0126 10:44:07.000969 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.068396 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.068434 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.068443 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.068457 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.068470 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:07Z","lastTransitionTime":"2026-01-26T10:44:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.171101 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.171166 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.171183 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.171605 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.171659 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:07Z","lastTransitionTime":"2026-01-26T10:44:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.274180 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.274230 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.274247 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.274268 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.274311 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:07Z","lastTransitionTime":"2026-01-26T10:44:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.376802 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.376836 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.376847 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.376861 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.376871 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:07Z","lastTransitionTime":"2026-01-26T10:44:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.479056 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.479113 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.479129 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.479147 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.479163 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:07Z","lastTransitionTime":"2026-01-26T10:44:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.582551 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.582635 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.582659 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.582794 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.582896 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:07Z","lastTransitionTime":"2026-01-26T10:44:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.685349 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.685400 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.685412 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.685431 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.685444 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:07Z","lastTransitionTime":"2026-01-26T10:44:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.787114 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.787151 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.787162 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.787177 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.787190 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:07Z","lastTransitionTime":"2026-01-26T10:44:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.889201 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.889243 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.889254 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.889271 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.889310 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:07Z","lastTransitionTime":"2026-01-26T10:44:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.987484 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 03:17:14.882949403 +0000 UTC Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.991494 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.991536 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.991551 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.991569 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:07 crc kubenswrapper[5003]: I0126 10:44:07.991580 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:07Z","lastTransitionTime":"2026-01-26T10:44:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.000806 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:44:08 crc kubenswrapper[5003]: E0126 10:44:08.000941 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.001145 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:44:08 crc kubenswrapper[5003]: E0126 10:44:08.001212 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.001371 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:44:08 crc kubenswrapper[5003]: E0126 10:44:08.001436 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.093921 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.093966 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.093978 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.093993 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.094004 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:08Z","lastTransitionTime":"2026-01-26T10:44:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.196524 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.196567 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.196579 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.196599 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.196610 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:08Z","lastTransitionTime":"2026-01-26T10:44:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.298814 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.298855 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.298867 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.298888 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.298902 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:08Z","lastTransitionTime":"2026-01-26T10:44:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.402408 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.402446 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.402456 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.402475 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.402486 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:08Z","lastTransitionTime":"2026-01-26T10:44:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.505535 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.505568 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.505578 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.505592 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.505602 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:08Z","lastTransitionTime":"2026-01-26T10:44:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.608105 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.608178 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.608189 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.608229 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.608244 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:08Z","lastTransitionTime":"2026-01-26T10:44:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.710438 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.710475 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.710488 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.710503 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.710515 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:08Z","lastTransitionTime":"2026-01-26T10:44:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.812960 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.812999 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.813013 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.813031 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.813040 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:08Z","lastTransitionTime":"2026-01-26T10:44:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.915439 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.915514 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.915532 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.915551 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.915563 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:08Z","lastTransitionTime":"2026-01-26T10:44:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:08 crc kubenswrapper[5003]: I0126 10:44:08.988265 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 15:04:21.289914249 +0000 UTC Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.001639 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:44:09 crc kubenswrapper[5003]: E0126 10:44:09.002126 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.002384 5003 scope.go:117] "RemoveContainer" containerID="38a9363b7cc831a8e7ccde1ea6ed7a5022d00de2d620f6c504382ff8647293c6" Jan 26 10:44:09 crc kubenswrapper[5003]: E0126 10:44:09.002643 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-q24zl_openshift-ovn-kubernetes(f9a98683-f9ac-45d4-9312-43ebf25bdb52)\"" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.017978 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.018040 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.018059 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.018090 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.018107 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:09Z","lastTransitionTime":"2026-01-26T10:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.120511 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.120574 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.120588 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.120601 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.120609 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:09Z","lastTransitionTime":"2026-01-26T10:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.158803 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.158836 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.158845 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.158859 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.158868 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:09Z","lastTransitionTime":"2026-01-26T10:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:09 crc kubenswrapper[5003]: E0126 10:44:09.172589 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:09Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.176216 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.176248 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
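
This is the most informative failure in the section: the node-status PATCH itself is being rejected. OpenShift's network-node-identity admission webhook (node.network-node-identity.openshift.io, served on 127.0.0.1:9743) intercepts node updates, and its serving certificate expired on 2025-08-24T17:21:41Z, about five months before the log clock of 2026-01-26, so every status update fails with the x509 error and the kubelet retries; the same payload is re-posted twice more below. One way to confirm the stale certificate from the node, sketched in Python (assumes the third-party cryptography package is installed and the endpoint is reachable from wherever this runs):

    import socket, ssl
    from cryptography import x509  # third-party; assumed available

    host, port = "127.0.0.1", 9743  # the webhook endpoint from the error above
    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    ctx.check_hostname = False
    ctx.verify_mode = ssl.CERT_NONE  # fetch the cert even though it is expired

    with socket.create_connection((host, port), timeout=5) as sock:
        with ctx.wrap_socket(sock, server_hostname=host) as tls:
            der = tls.getpeercert(binary_form=True)

    cert = x509.load_der_x509_certificate(der)
    print("notBefore:", cert.not_valid_before)
    print("notAfter: ", cert.not_valid_after)  # expect 2025-08-24 17:21:41 here
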
event="NodeHasNoDiskPressure" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.176257 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.176270 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.176295 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:09Z","lastTransitionTime":"2026-01-26T10:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:09 crc kubenswrapper[5003]: E0126 10:44:09.187645 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:09Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.191581 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.191623 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.191639 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.191660 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.191673 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:09Z","lastTransitionTime":"2026-01-26T10:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:09 crc kubenswrapper[5003]: E0126 10:44:09.205046 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:09Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.208941 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.208984 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.208995 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.209011 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.209023 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:09Z","lastTransitionTime":"2026-01-26T10:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:09 crc kubenswrapper[5003]: E0126 10:44:09.221218 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:09Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.224843 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.225777 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.225840 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.225998 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.226142 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:09Z","lastTransitionTime":"2026-01-26T10:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:09 crc kubenswrapper[5003]: E0126 10:44:09.246272 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:09Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:09 crc kubenswrapper[5003]: E0126 10:44:09.246471 5003 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.249533 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.249569 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.249585 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.249604 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.249621 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:09Z","lastTransitionTime":"2026-01-26T10:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.352570 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.352623 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.352636 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.352660 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.352671 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:09Z","lastTransitionTime":"2026-01-26T10:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.455163 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.455194 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.455204 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.455217 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.455225 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:09Z","lastTransitionTime":"2026-01-26T10:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.557524 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.557555 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.557566 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.557583 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.557594 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:09Z","lastTransitionTime":"2026-01-26T10:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.660763 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.660815 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.660829 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.660847 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.660860 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:09Z","lastTransitionTime":"2026-01-26T10:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.762575 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.762599 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.762608 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.762621 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.762630 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:09Z","lastTransitionTime":"2026-01-26T10:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
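The Ready=False condition repeated above always cites the same root cause: the container runtime reports NetworkReady=false because no CNI configuration exists yet in /etc/kubernetes/cni/net.d/. The real readiness probe lives in the runtime's libcni integration; the sketch below is a simplified stand-in that only checks whether any of the usual config file types are present in the directory the message names:

    // cnicheck.go: report whether any candidate CNI network config exists.
    // A stand-in for the runtime's check, not the CRI-O/libcni implementation.
    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        confDir := "/etc/kubernetes/cni/net.d" // directory named in the log message
        var found []string
        for _, pat := range []string{"*.conf", "*.conflist", "*.json"} {
            matches, _ := filepath.Glob(filepath.Join(confDir, pat)) // Glob errors only on a bad pattern
            found = append(found, matches...)
        }
        if len(found) == 0 {
            // Mirrors the runtime's complaint: "no CNI configuration file in
            // /etc/kubernetes/cni/net.d/. Has your network provider started?"
            fmt.Println("no CNI configuration file found; network plugin not ready")
            os.Exit(1)
        }
        for _, f := range found {
            fmt.Println("candidate CNI config:", f)
        }
    }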
Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.865380 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.865429 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.865441 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.865459 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.865471 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:09Z","lastTransitionTime":"2026-01-26T10:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.967678 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.967726 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.967740 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.967758 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.967772 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:09Z","lastTransitionTime":"2026-01-26T10:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:09 crc kubenswrapper[5003]: I0126 10:44:09.988372 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 22:09:38.629595664 +0000 UTC Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.000690 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.000753 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.000812 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
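The certificate_manager.go entry just above shows a kubelet-serving certificate that expires 2026-02-24 but a rotation deadline of 2025-12-16, already in the past at 10:44 on 2026-01-26, so rotation is overdue the moment the kubelet checks. Assuming the randomized-deadline scheme the upstream client-go certificate manager is commonly described with (a point between 70% and 90% of the validity window; the NotBefore below is an assumed value, since the log only shows the expiration), a sketch of how such a deadline is computed and why it can land before "now":

    // rotation.go: compute a jittered rotation deadline for a certificate.
    // Assumption: deadline = NotBefore + U[0.7, 0.9) * (NotAfter - NotBefore).
    package main

    import (
        "fmt"
        "math/rand"
        "time"
    )

    func rotationDeadline(notBefore, notAfter time.Time) time.Time {
        total := notAfter.Sub(notBefore)
        jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
        return notBefore.Add(jittered)
    }

    func main() {
        // Expiration from the log; NotBefore assumed to be one year earlier.
        notAfter := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC)
        notBefore := notAfter.AddDate(-1, 0, 0)
        now := time.Date(2026, 1, 26, 10, 44, 10, 0, time.UTC)

        d := rotationDeadline(notBefore, notAfter)
        fmt.Println("rotation deadline:", d)
        if d.Before(now) {
            // Once most of the validity window is consumed, every freshly
            // computed deadline precedes now and rotation is immediately due.
            fmt.Println("deadline already passed; certificate rotation is overdue")
        }
    }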
Jan 26 10:44:10 crc kubenswrapper[5003]: E0126 10:44:10.000838 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:44:10 crc kubenswrapper[5003]: E0126 10:44:10.000994 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:44:10 crc kubenswrapper[5003]: E0126 10:44:10.001072 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.070196 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.070242 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.070251 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.070267 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.070275 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:10Z","lastTransitionTime":"2026-01-26T10:44:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.172029 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.172083 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.172095 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.172116 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.172128 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:10Z","lastTransitionTime":"2026-01-26T10:44:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.274095 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.274143 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.274154 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.274172 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.274185 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:10Z","lastTransitionTime":"2026-01-26T10:44:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.376073 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.376153 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.376179 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.376208 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.376230 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:10Z","lastTransitionTime":"2026-01-26T10:44:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.478713 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.478751 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.478763 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.478779 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.478790 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:10Z","lastTransitionTime":"2026-01-26T10:44:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.581306 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.581353 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.581363 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.581378 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.581389 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:10Z","lastTransitionTime":"2026-01-26T10:44:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.684237 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.684294 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.684305 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.684320 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.684329 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:10Z","lastTransitionTime":"2026-01-26T10:44:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.786368 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.786408 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.786419 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.786433 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.786443 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:10Z","lastTransitionTime":"2026-01-26T10:44:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.889100 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.889159 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.889171 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.889190 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.889204 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:10Z","lastTransitionTime":"2026-01-26T10:44:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.989272 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 09:47:45.040992136 +0000 UTC
Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.991972 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.992022 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.992033 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.992053 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:10 crc kubenswrapper[5003]: I0126 10:44:10.992067 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:10Z","lastTransitionTime":"2026-01-26T10:44:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.000827 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq"
Jan 26 10:44:11 crc kubenswrapper[5003]: E0126 10:44:11.000984 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.094531 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.094570 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.094581 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.094598 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.094611 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:11Z","lastTransitionTime":"2026-01-26T10:44:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.196751 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.196797 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.196806 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.196822 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.196834 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:11Z","lastTransitionTime":"2026-01-26T10:44:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.200176 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/aa06185d-fe5e-423a-b5a7-19e8bb7c8a60-metrics-certs\") pod \"network-metrics-daemon-4jrnq\" (UID: \"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60\") " pod="openshift-multus/network-metrics-daemon-4jrnq"
Jan 26 10:44:11 crc kubenswrapper[5003]: E0126 10:44:11.200338 5003 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 26 10:44:11 crc kubenswrapper[5003]: E0126 10:44:11.200421 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/aa06185d-fe5e-423a-b5a7-19e8bb7c8a60-metrics-certs podName:aa06185d-fe5e-423a-b5a7-19e8bb7c8a60 nodeName:}" failed. No retries permitted until 2026-01-26 10:44:43.200403196 +0000 UTC m=+98.741628757 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/aa06185d-fe5e-423a-b5a7-19e8bb7c8a60-metrics-certs") pod "network-metrics-daemon-4jrnq" (UID: "aa06185d-fe5e-423a-b5a7-19e8bb7c8a60") : object "openshift-multus"/"metrics-daemon-secret" not registered
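
The three entries above show why the metrics-certs mount is stuck: the kubelet reports the openshift-multus/metrics-daemon-secret as "not registered" in its own object cache, volume setup fails, and retries are backed off for 32s (the durationBeforeRetry). A quick way to distinguish a secret that is genuinely absent from a kubelet-side cache gap is to ask the API server directly. A minimal client-go sketch, assuming a reachable kubeconfig in $KUBECONFIG; the file name secretcheck.go and the program itself are illustrative, not part of the cluster:

// secretcheck.go - minimal sketch (assumes a valid kubeconfig at $KUBECONFIG);
// confirms whether openshift-multus/metrics-daemon-secret exists server-side,
// to separate "secret missing" from the kubelet's "not registered" cache state.
package main

import (
	"context"
	"fmt"
	"os"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", os.Getenv("KUBECONFIG"))
	if err != nil {
		fmt.Fprintln(os.Stderr, "load kubeconfig:", err)
		os.Exit(1)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		fmt.Fprintln(os.Stderr, "build client:", err)
		os.Exit(1)
	}
	s, err := cs.CoreV1().Secrets("openshift-multus").Get(context.TODO(), "metrics-daemon-secret", metav1.GetOptions{})
	if err != nil {
		// A NotFound here would mean the secret really is missing.
		fmt.Fprintln(os.Stderr, "get secret:", err)
		os.Exit(1)
	}
	fmt.Printf("secret %s/%s exists, %d keys\n", s.Namespace, s.Name, len(s.Data))
}

If the Get succeeds while the kubelet keeps logging "not registered", the problem is likely the kubelet's not-yet-synced state on a node that is still NotReady, not the secret itself.
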
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.299459 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.299500 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.299509 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.299527 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.299537 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:11Z","lastTransitionTime":"2026-01-26T10:44:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.401577 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.401613 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.401622 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.401640 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.401649 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:11Z","lastTransitionTime":"2026-01-26T10:44:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.503726 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.503765 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.503774 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.503788 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.503797 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:11Z","lastTransitionTime":"2026-01-26T10:44:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.606123 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.606203 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.606215 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.606230 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.606241 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:11Z","lastTransitionTime":"2026-01-26T10:44:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.708641 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.708681 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.708691 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.708707 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
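
Every NodeNotReady / "Node became not ready" tick above repeats one root cause: the kubelet finds no CNI network config in /etc/kubernetes/cni/net.d/, so the runtime reports NetworkReady=false and no pod sandbox can be created until the network plugin (multus/OVN on this cluster) writes one. A minimal sketch of the check the message implies, listing that directory for the config-file extensions CNI loaders commonly accept; the file name cnicheck.go is illustrative:

// cnicheck.go - minimal sketch: lists CNI config candidates in the directory
// the kubelet complains about; an empty listing matches the NetworkReady=false
// loop in the entries above.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	dir := "/etc/kubernetes/cni/net.d" // directory named in the kubelet message
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Fprintln(os.Stderr, "read dir:", err)
		os.Exit(1)
	}
	found := 0
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // extensions commonly accepted by CNI config loaders
			fmt.Println(filepath.Join(dir, e.Name()))
			found++
		}
	}
	if found == 0 {
		fmt.Println("no CNI config present - network plugin has not written one yet")
	}
}

The moment a .conf/.conflist file appears there, the runtime flips NetworkReady to true and the repeating condition above stops.
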
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.708718 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:11Z","lastTransitionTime":"2026-01-26T10:44:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.810880 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.810965 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.810986 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.811003 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.811015 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:11Z","lastTransitionTime":"2026-01-26T10:44:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.913739 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.913775 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.913784 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.913802 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.913811 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:11Z","lastTransitionTime":"2026-01-26T10:44:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:11 crc kubenswrapper[5003]: I0126 10:44:11.990106 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 17:30:37.998585605 +0000 UTC
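
The certificate_manager.go:356 entries (10:44:10.989272 above, 10:44:11.990106 here, and another at 10:44:12.990639 below) print a different rotation deadline each second, every one of them earlier than the node's current time. That is consistent with client-go's certificate manager re-drawing a jittered deadline inside the certificate's validity window on each evaluation and finding the certificate already due for rotation. A rough sketch of that scheme under stated assumptions, not the kubelet's exact code: the 0.7-0.84 jitter window matches upstream client-go as I recall it, and NotBefore is assumed since the log shows only the expiration:

// rotationdeadline.go - rough sketch of a jittered rotation deadline:
// notBefore + (0.7..0.84) * lifetime, re-drawn on every evaluation.
// A deadline already in the past means "rotate now".
package main

import (
	"fmt"
	"math/rand"
	"time"
)

// nextRotationDeadline mimics the jitter scheme (assumption, see lead-in).
func nextRotationDeadline(notBefore, notAfter time.Time) time.Time {
	lifetime := notAfter.Sub(notBefore)
	jittered := time.Duration(float64(lifetime) * (1 + 0.2*rand.Float64()) * 0.7)
	return notBefore.Add(jittered)
}

func main() {
	// Expiration taken from the certificate_manager.go:356 entries above;
	// NotBefore is an assumed ~6-month validity, not shown in the log.
	notAfter, _ := time.Parse(time.RFC3339, "2026-02-24T05:53:03Z")
	notBefore := notAfter.Add(-184 * 24 * time.Hour)
	for i := 0; i < 3; i++ {
		fmt.Println("rotation deadline:", nextRotationDeadline(notBefore, notAfter))
	}
}

The spread of deadlines in the log (2025-11-26, 2025-12-19, 2026-01-17) is the visible effect of that re-drawn jitter.
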
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.001747 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:44:12 crc kubenswrapper[5003]: E0126 10:44:12.001839 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:44:12 crc kubenswrapper[5003]: E0126 10:44:12.001972 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.016540 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.016578 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.016592 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.016607 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.016619 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:12Z","lastTransitionTime":"2026-01-26T10:44:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.119180 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.119244 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.119259 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.119292 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.119306 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:12Z","lastTransitionTime":"2026-01-26T10:44:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.221540 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.221574 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.221583 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.221597 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.221608 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:12Z","lastTransitionTime":"2026-01-26T10:44:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.324054 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.324102 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.324115 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.324133 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.324145 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:12Z","lastTransitionTime":"2026-01-26T10:44:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.426534 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.426582 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.426595 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.426630 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.426643 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:12Z","lastTransitionTime":"2026-01-26T10:44:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.528974 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.529010 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.529019 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.529032 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.529043 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:12Z","lastTransitionTime":"2026-01-26T10:44:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.631802 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.631844 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.631855 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.631893 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.631903 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:12Z","lastTransitionTime":"2026-01-26T10:44:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.734107 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.734176 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.734186 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.734202 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.734212 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:12Z","lastTransitionTime":"2026-01-26T10:44:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.836798 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.836844 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.836855 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.836874 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.836886 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:12Z","lastTransitionTime":"2026-01-26T10:44:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.938868 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.938898 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.938906 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.938919 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.938927 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:12Z","lastTransitionTime":"2026-01-26T10:44:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:12 crc kubenswrapper[5003]: I0126 10:44:12.990639 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 01:53:54.211763517 +0000 UTC Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.000987 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:44:13 crc kubenswrapper[5003]: E0126 10:44:13.001096 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.040698 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.040735 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.040749 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.040769 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.040781 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:13Z","lastTransitionTime":"2026-01-26T10:44:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.142584 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.142620 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.142629 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.142642 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.142651 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:13Z","lastTransitionTime":"2026-01-26T10:44:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.246614 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.246659 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.246671 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.246686 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.246698 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:13Z","lastTransitionTime":"2026-01-26T10:44:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.349260 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.349322 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.349333 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.349351 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.349363 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:13Z","lastTransitionTime":"2026-01-26T10:44:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.366447 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vpb6l_9a2a5d08-c449-45c6-8e1f-340c076422db/kube-multus/0.log" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.366488 5003 generic.go:334] "Generic (PLEG): container finished" podID="9a2a5d08-c449-45c6-8e1f-340c076422db" containerID="432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4" exitCode=1 Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.366515 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vpb6l" event={"ID":"9a2a5d08-c449-45c6-8e1f-340c076422db","Type":"ContainerDied","Data":"432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4"} Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.366837 5003 scope.go:117] "RemoveContainer" containerID="432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.379431 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef6c99048be2d0cfc98ef20b8559ceaf44491ffebbc5a8f363117a1c7a0b3c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:13Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.391131 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:13Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.402995 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eac8a62-4b57-4423-b3b6-5d62047182f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e29a3746f285efd57f60fafc32c5842c9b6509264e0c3385c5e1b5600231f047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98b5af77054fb64a506432cbe6801e93888383a6288532432b7217b7b76c5cea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s4f8s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:13Z is after 2025-08-24T17:21:41Z" Jan 26 
10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.413138 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4jrnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4jrnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:13Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.424788 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:13Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.435827 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:13Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.446326 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:13Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.452215 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.452261 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.452314 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.452333 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.452344 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:13Z","lastTransitionTime":"2026-01-26T10:44:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.456982 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:13Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.469911 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:13Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.480241 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xfz4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81a673ac45c8bce5e09aa400e29966d2fa127d94716fbcbb4601193bd912e66d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-j74r8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xfz4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:13Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.505581 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cer
t-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\
"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:13Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.517091 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2977ac6-0892-467f-b265-16c14d9c63a7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d73b095e2001015587e06ebb41c1460ef946ddc2dd47f2c893ee82fe6353370b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://485f8dd99739834234edd418f9fc2fa8a50c854bf08a088fafb8ac6814dca1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2a532ad0f92d0c2284fcd0c52cac0b5187dacaccccf2fc361fab4173436c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c0f56e5d83ce7fcbde9c0503292ed1f242b2c1f535b471974ca3662ba6933e3\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c0f56e5d83ce7fcbde9c0503292ed1f242b2c1f535b471974ca3662ba6933e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:13Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.526846 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:13Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.543006 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\
\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:13Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.554370 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.554424 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.554435 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.554452 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.554464 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:13Z","lastTransitionTime":"2026-01-26T10:44:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.556249 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f74daac1b5987191a8aec42f28dc59180aaa2033b31d0c4341b0938e473fe2f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:13Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.568719 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:44:12Z\\\",\\\"message\\\":\\\"2026-01-26T10:43:26+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_07bfdeb5-2f91-4361-a75a-4b768c1ada25\\\\n2026-01-26T10:43:27+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_07bfdeb5-2f91-4361-a75a-4b768c1ada25 to /host/opt/cni/bin/\\\\n2026-01-26T10:43:27Z [verbose] multus-daemon started\\\\n2026-01-26T10:43:27Z [verbose] Readiness Indicator file check\\\\n2026-01-26T10:44:12Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:13Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.585309 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a9363b7cc831a8e7ccde1ea6ed7a5022d00de2d620f6c504382ff8647293c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38a9363b7cc831a8e7ccde1ea6ed7a5022d00de2d620f6c504382ff8647293c6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:43:54Z\\\",\\\"message\\\":\\\"netes/ovnkube-control-plane-749d76644c-s4f8s\\\\nI0126 10:43:54.165445 6637 lb_config.go:1031] Cluster endpoints for openshift-machine-api/cluster-autoscaler-operator for network=default are: map[]\\\\nI0126 10:43:54.165459 6637 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s in node crc\\\\nI0126 10:43:54.165473 6637 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s after 0 failed attempt(s)\\\\nI0126 10:43:54.165485 6637 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s\\\\nI0126 10:43:54.165486 6637 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nI0126 10:43:54.165398 6637 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0126 10:43:54.165472 6637 services_controller.go:443] Built service openshift-machine-api/cluster-autoscaler-operator LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.245\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, exte\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-q24zl_openshift-ovn-kubernetes(f9a98683-f9ac-45d4-9312-43ebf25bdb52)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:13Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.597387 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,
\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:13Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.657382 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.657442 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.657454 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.657470 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.657480 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:13Z","lastTransitionTime":"2026-01-26T10:44:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.759652 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.759684 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.759808 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.759823 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.759833 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:13Z","lastTransitionTime":"2026-01-26T10:44:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.862348 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.862402 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.862413 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.862430 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.862478 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:13Z","lastTransitionTime":"2026-01-26T10:44:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.964415 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.964451 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.964462 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.964477 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.964488 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:13Z","lastTransitionTime":"2026-01-26T10:44:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:13 crc kubenswrapper[5003]: I0126 10:44:13.991647 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 23:11:45.355521931 +0000 UTC Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.001033 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:44:14 crc kubenswrapper[5003]: E0126 10:44:14.001195 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.001406 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:44:14 crc kubenswrapper[5003]: E0126 10:44:14.001469 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.001570 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:44:14 crc kubenswrapper[5003]: E0126 10:44:14.001611 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.066531 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.066566 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.066575 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.066588 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.066597 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:14Z","lastTransitionTime":"2026-01-26T10:44:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.169865 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.169921 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.169941 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.169971 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.169996 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:14Z","lastTransitionTime":"2026-01-26T10:44:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.272528 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.272561 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.272572 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.272587 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.272598 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:14Z","lastTransitionTime":"2026-01-26T10:44:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.371306 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vpb6l_9a2a5d08-c449-45c6-8e1f-340c076422db/kube-multus/0.log" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.371368 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vpb6l" event={"ID":"9a2a5d08-c449-45c6-8e1f-340c076422db","Type":"ContainerStarted","Data":"385aa2f7573bd1359a2745d96b174f9df281bd6c5d661a0dfb2e77084cfa011f"} Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.374976 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.375017 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.375029 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.375044 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.375056 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:14Z","lastTransitionTime":"2026-01-26T10:44:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.383909 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:14Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.399377 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f74daac1b5987191a8aec42f28dc59180aaa2033b31d0c4341b0938e473fe2f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:14Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.411490 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://385aa2f7573bd1359a2745d96b174f9df281bd6c5d661a0dfb2e77084cfa011f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:44:12Z\\\",\\\"message\\\":\\\"2026-01-26T10:43:26+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_07bfdeb5-2f91-4361-a75a-4b768c1ada25\\\\n2026-01-26T10:43:27+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_07bfdeb5-2f91-4361-a75a-4b768c1ada25 to /host/opt/cni/bin/\\\\n2026-01-26T10:43:27Z [verbose] multus-daemon started\\\\n2026-01-26T10:43:27Z [verbose] Readiness Indicator file check\\\\n2026-01-26T10:44:12Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:44:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:14Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.429185 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a9363b7cc831a8e7ccde1ea6ed7a5022d00de2d620f6c504382ff8647293c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38a9363b7cc831a8e7ccde1ea6ed7a5022d00de2d620f6c504382ff8647293c6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:43:54Z\\\",\\\"message\\\":\\\"netes/ovnkube-control-plane-749d76644c-s4f8s\\\\nI0126 10:43:54.165445 6637 lb_config.go:1031] Cluster endpoints for openshift-machine-api/cluster-autoscaler-operator for network=default are: map[]\\\\nI0126 10:43:54.165459 6637 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s in node crc\\\\nI0126 10:43:54.165473 6637 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s after 0 failed attempt(s)\\\\nI0126 10:43:54.165485 6637 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s\\\\nI0126 10:43:54.165486 6637 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nI0126 10:43:54.165398 6637 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0126 10:43:54.165472 6637 services_controller.go:443] Built service openshift-machine-api/cluster-autoscaler-operator LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.245\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, exte\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-q24zl_openshift-ovn-kubernetes(f9a98683-f9ac-45d4-9312-43ebf25bdb52)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:14Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.439660 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2977ac6-0892-467f-b265-16c14d9c63a7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d73b095e2001015587e06ebb41c1460ef946ddc2dd47f2c893ee82fe6353370b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://485f8dd99739834234edd418f9fc2fa8a50c854bf08a088fafb8ac6814dca1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2a532ad0f92d0c2284fcd0c52cac0b5187dacaccccf2fc361fab4173436c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c0f56e5d83ce7fcbde9c0503292ed1f242b2c1f535b471974ca3662ba6933e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c0f56e5d83ce7fcbde9c0503292ed1f242b2c1f535b471974ca3662ba6933e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:14Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.448762 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:14Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.460566 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:14Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.470552 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eac8a62-4b57-4423-b3b6-5d62047182f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e29a3746f285efd57f60fafc32c5842c9b6509264e0c3385c5e1b5600231f047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98b5af77054fb64a506432cbe6801e93888383a6288532432b7217b7b76c5cea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s4f8s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:14Z is after 2025-08-24T17:21:41Z" Jan 26 
10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.476955 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.476988 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.476998 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.477015 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.477027 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:14Z","lastTransitionTime":"2026-01-26T10:44:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.480988 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4jrnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4jrnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:14Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.493720 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:14Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.505146 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef6c99048be2d0cfc98ef20b8559ceaf44491ffebbc5a8f363117a1c7a0b3c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-26T10:44:14Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.518779 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:14Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.530409 5003 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:14Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.542538 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:14Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.552891 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xfz4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81a673ac45c8bce5e09aa400e29966d2fa127d94716fbcbb4601193bd912e66d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-j74r8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xfz4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:14Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.572935 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cer
t-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\
"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:14Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.580795 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.580835 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.580847 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.580864 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.580876 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:14Z","lastTransitionTime":"2026-01-26T10:44:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI 
configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.586573 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:14Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.597482 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:14Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.683206 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.683234 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.683243 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.683255 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.683264 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:14Z","lastTransitionTime":"2026-01-26T10:44:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.785868 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.785918 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.785931 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.785950 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.785961 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:14Z","lastTransitionTime":"2026-01-26T10:44:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.888070 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.888110 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.888118 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.888131 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.888140 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:14Z","lastTransitionTime":"2026-01-26T10:44:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.991045 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.991086 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.991101 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.991117 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.991129 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:14Z","lastTransitionTime":"2026-01-26T10:44:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:14 crc kubenswrapper[5003]: I0126 10:44:14.992128 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 03:23:43.486203038 +0000 UTC Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.001468 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:44:15 crc kubenswrapper[5003]: E0126 10:44:15.001573 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.019105 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"v
olumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.1
68.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:15Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.030411 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac1
17eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:15Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.041017 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef6c99048be2d0cfc98ef20b8559ceaf44491ffebbc5a8f363117a1c7a0b3c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:15Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.053215 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:15Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.064258 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eac8a62-4b57-4423-b3b6-5d62047182f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e29a3746f285efd57f60fafc32c5842c9b6509264e0c3385c5e1b5600231f047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98b5af77054fb64a506432cbe6801e93888383a6288532432b7217b7b76c5cea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s4f8s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:15Z is after 2025-08-24T17:21:41Z" Jan 26 
10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.073784 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4jrnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4jrnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:15Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.092961 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.093018 5003 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.093030 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.093045 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.093056 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:15Z","lastTransitionTime":"2026-01-26T10:44:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.099944 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernet
es/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"r
eason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:15Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.112772 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:15Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.124042 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:15Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.134404 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:15Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.144816 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:15Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.153556 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xfz4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81a673ac45c8bce5e09aa400e29966d2fa127d94716fbcbb4601193bd912e66d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j74r8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xfz4f\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:15Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.169242 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33840
af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a9363b7cc831a8e7ccde1ea6ed7a5022d00de2d620f6c504382ff8647293c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38a9363b7cc831a8e7ccde1ea6ed7a5022d00de2d620f6c504382ff8647293c6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:43:54Z\\\",\\\"message\\\":\\\"netes/ovnkube-control-plane-749d76644c-s4f8s\\\\nI0126 10:43:54.165445 6637 lb_config.go:1031] Cluster endpoints for openshift-machine-api/cluster-autoscaler-operator for network=default are: map[]\\\\nI0126 10:43:54.165459 6637 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s in node crc\\\\nI0126 10:43:54.165473 6637 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s after 0 failed attempt(s)\\\\nI0126 10:43:54.165485 6637 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s\\\\nI0126 10:43:54.165486 6637 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nI0126 10:43:54.165398 6637 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0126 10:43:54.165472 6637 services_controller.go:443] Built service openshift-machine-api/cluster-autoscaler-operator LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.245\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, 
exte\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-q24zl_openshift-ovn-kubernetes(f9a98683-f9ac-45d4-9312-43ebf25bdb52)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiv
eReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:15Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.180464 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2977ac6-0892-467f-b265-16c14d9c63a7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d73b095e2001015587e06ebb41c1460ef946ddc2dd47f2c893ee82fe6353370b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://485f8dd99739834234edd418f9fc2fa8a50c854bf08a088fafb8ac6814dca1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2a532ad0f92d0c2284fcd0c52cac0b5187dacaccccf2fc361fab4173436c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c0f56e5d83ce7fcbde9c0503292ed1f242b2c1f535b471974ca3662ba6933e3\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c0f56e5d83ce7fcbde9c0503292ed1f242b2c1f535b471974ca3662ba6933e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:15Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.190337 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:15Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.194930 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.194960 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.194972 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.194986 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.194997 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:15Z","lastTransitionTime":"2026-01-26T10:44:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.201081 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernet
es.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:15Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.215588 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f74daac1b5987191a8aec42f28dc59180aaa2033b31d0c4341b0938e473fe2f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:15Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.228899 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://385aa2f7573bd1359a2745d96b174f9df281bd6c5d661a0dfb2e77084cfa011f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:44:12Z\\\",\\\"message\\\":\\\"2026-01-26T10:43:26+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_07bfdeb5-2f91-4361-a75a-4b768c1ada25\\\\n2026-01-26T10:43:27+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_07bfdeb5-2f91-4361-a75a-4b768c1ada25 to /host/opt/cni/bin/\\\\n2026-01-26T10:43:27Z [verbose] multus-daemon started\\\\n2026-01-26T10:43:27Z [verbose] Readiness Indicator file check\\\\n2026-01-26T10:44:12Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:44:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:15Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.296713 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.296754 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.296764 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.296780 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.296790 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:15Z","lastTransitionTime":"2026-01-26T10:44:15Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.399055 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.399131 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.399143 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.399158 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.399169 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:15Z","lastTransitionTime":"2026-01-26T10:44:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.501747 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.501784 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.501798 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.501814 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.501824 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:15Z","lastTransitionTime":"2026-01-26T10:44:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.603526 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.603558 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.603566 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.603578 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.603586 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:15Z","lastTransitionTime":"2026-01-26T10:44:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.705922 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.705959 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.705967 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.705982 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.705992 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:15Z","lastTransitionTime":"2026-01-26T10:44:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.808698 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.808748 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.808762 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.808780 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.808798 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:15Z","lastTransitionTime":"2026-01-26T10:44:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.910635 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.910704 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.910718 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.910738 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.910754 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:15Z","lastTransitionTime":"2026-01-26T10:44:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:15 crc kubenswrapper[5003]: I0126 10:44:15.992755 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 16:20:19.521110597 +0000 UTC Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.001125 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.001173 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.001217 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:44:16 crc kubenswrapper[5003]: E0126 10:44:16.001269 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:44:16 crc kubenswrapper[5003]: E0126 10:44:16.001374 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:44:16 crc kubenswrapper[5003]: E0126 10:44:16.001476 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.012753 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.012786 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.012798 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.012814 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.012826 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:16Z","lastTransitionTime":"2026-01-26T10:44:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.116745 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.116799 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.116817 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.116851 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.116867 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:16Z","lastTransitionTime":"2026-01-26T10:44:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.219459 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.219492 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.219501 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.219514 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.219525 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:16Z","lastTransitionTime":"2026-01-26T10:44:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.321970 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.322023 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.322035 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.322054 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.322064 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:16Z","lastTransitionTime":"2026-01-26T10:44:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.424013 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.424097 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.424123 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.424153 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.424175 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:16Z","lastTransitionTime":"2026-01-26T10:44:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.526229 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.526274 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.526297 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.526314 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.526323 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:16Z","lastTransitionTime":"2026-01-26T10:44:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.628801 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.628856 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.628866 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.628883 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.628895 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:16Z","lastTransitionTime":"2026-01-26T10:44:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.733039 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.733084 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.733130 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.733151 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.733165 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:16Z","lastTransitionTime":"2026-01-26T10:44:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.836085 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.836125 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.836136 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.836151 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.836161 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:16Z","lastTransitionTime":"2026-01-26T10:44:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.938649 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.938684 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.938693 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.938711 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.938724 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:16Z","lastTransitionTime":"2026-01-26T10:44:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:16 crc kubenswrapper[5003]: I0126 10:44:16.993319 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 14:45:48.815625365 +0000 UTC Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.001830 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:44:17 crc kubenswrapper[5003]: E0126 10:44:17.002020 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.040831 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.040885 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.040897 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.040913 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.040923 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:17Z","lastTransitionTime":"2026-01-26T10:44:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.142947 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.143000 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.143015 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.143033 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.143045 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:17Z","lastTransitionTime":"2026-01-26T10:44:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.245128 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.245168 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.245179 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.245193 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.245203 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:17Z","lastTransitionTime":"2026-01-26T10:44:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.347941 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.347982 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.347992 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.348006 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.348016 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:17Z","lastTransitionTime":"2026-01-26T10:44:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.450525 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.450583 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.450600 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.450626 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.450652 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:17Z","lastTransitionTime":"2026-01-26T10:44:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.552882 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.552920 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.552931 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.552946 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.552958 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:17Z","lastTransitionTime":"2026-01-26T10:44:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.655026 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.655065 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.655073 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.655089 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.655099 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:17Z","lastTransitionTime":"2026-01-26T10:44:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.757765 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.757816 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.757828 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.757845 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.757856 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:17Z","lastTransitionTime":"2026-01-26T10:44:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.859940 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.859974 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.859987 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.860003 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.860017 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:17Z","lastTransitionTime":"2026-01-26T10:44:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.962564 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.962599 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.962610 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.962627 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.962640 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:17Z","lastTransitionTime":"2026-01-26T10:44:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:17 crc kubenswrapper[5003]: I0126 10:44:17.994336 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 04:41:14.786948215 +0000 UTC Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.001800 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.001950 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.001991 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:44:18 crc kubenswrapper[5003]: E0126 10:44:18.002088 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:44:18 crc kubenswrapper[5003]: E0126 10:44:18.002230 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:44:18 crc kubenswrapper[5003]: E0126 10:44:18.002403 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.012196 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.065218 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.065261 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.065270 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.065298 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.065308 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:18Z","lastTransitionTime":"2026-01-26T10:44:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.167340 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.167395 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.167408 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.167426 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.167438 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:18Z","lastTransitionTime":"2026-01-26T10:44:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.269821 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.269904 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.269937 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.269966 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.269990 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:18Z","lastTransitionTime":"2026-01-26T10:44:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.373097 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.373144 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.373155 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.373172 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.373184 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:18Z","lastTransitionTime":"2026-01-26T10:44:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.475474 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.475538 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.475550 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.475568 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.475581 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:18Z","lastTransitionTime":"2026-01-26T10:44:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.578183 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.578234 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.578250 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.578273 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.578316 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:18Z","lastTransitionTime":"2026-01-26T10:44:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.683790 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.684001 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.684017 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.684331 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.684346 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:18Z","lastTransitionTime":"2026-01-26T10:44:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.787010 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.787063 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.787076 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.787099 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.787115 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:18Z","lastTransitionTime":"2026-01-26T10:44:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.889584 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.889633 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.889642 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.889658 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.889668 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:18Z","lastTransitionTime":"2026-01-26T10:44:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.992555 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.992595 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.992605 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.992623 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.992634 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:18Z","lastTransitionTime":"2026-01-26T10:44:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 26 10:44:18 crc kubenswrapper[5003]: I0126 10:44:18.994838 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 03:44:02.938221347 +0000 UTC
Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.001191 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq"
Jan 26 10:44:19 crc kubenswrapper[5003]: E0126 10:44:19.001331 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60"
Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.094961 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.094992 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.095001 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.095015 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.095022 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:19Z","lastTransitionTime":"2026-01-26T10:44:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.197346 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.197410 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.197420 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.197436 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.197446 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:19Z","lastTransitionTime":"2026-01-26T10:44:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
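The certificate_manager record above is worth isolating: the kubelet-serving certificate is valid until 2026-02-24, but the computed rotation deadline (2025-12-07 here, 2026-01-10 on the earlier attempt) already lies in the past at the current time of 2026-01-26, so the manager attempts rotation immediately on every pass. A rough sketch of how such a deadline is typically derived follows; the 70-90% jitter window is an assumption loosely modeled on client-go's certificate manager, and the issue date is invented since the log only shows the expiration.

```go
package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline picks a random instant between 70% and 90% of the
// certificate's validity window, loosely modeled on client-go's
// certificate manager. Past the deadline, rotation is attempted at once.
func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	jitter := 0.7 + 0.2*rand.Float64() // assumption: 70%-90% of lifetime
	return notBefore.Add(time.Duration(float64(total) * jitter))
}

func main() {
	// Expiration taken from the log; issue date is an assumed placeholder.
	notAfter, _ := time.Parse("2006-01-02 15:04:05 -0700 MST", "2026-02-24 05:53:03 +0000 UTC")
	notBefore := notAfter.AddDate(-1, 0, 0) // assumption: one-year lifetime
	deadline := rotationDeadline(notBefore, notAfter)
	now, _ := time.Parse(time.RFC3339, "2026-01-26T10:44:18Z")
	fmt.Printf("rotation deadline %v, rotate now: %v\n", deadline, now.After(deadline))
}
```

With any jitter in that window the deadline lands well before 2026-01-26, which matches the log: the kubelet keeps announcing a past deadline and retrying rotation.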
Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.299978 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.300023 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.300035 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.300049 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.300058 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:19Z","lastTransitionTime":"2026-01-26T10:44:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.402691 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.402750 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.402766 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.402789 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.402809 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:19Z","lastTransitionTime":"2026-01-26T10:44:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.454737 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.454792 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.454807 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.454825 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.454839 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:19Z","lastTransitionTime":"2026-01-26T10:44:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:19 crc kubenswrapper[5003]: E0126 10:44:19.466897 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:19Z is after 2025-08-24T17:21:41Z"
Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.470940 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.470984 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
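Here the node-status patch surfaces the underlying failure: before accepting the patch, the API server must call the node.network-node-identity.openshift.io validating webhook at https://127.0.0.1:9743, and the webhook's serving certificate expired on 2025-08-24, five months before the current time of 2026-01-26, so every patch attempt is rejected and the kubelet retries. The rejection is ordinary x509 validity-window checking; below is a minimal standalone sketch of the same NotBefore/NotAfter test (the certificate path is hypothetical, not from the log).

```go
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

// checkValidity mirrors the x509 validity-window test that fails above:
// a certificate is rejected when now falls before NotBefore or after NotAfter.
func checkValidity(certPEM []byte, now time.Time) error {
	block, _ := pem.Decode(certPEM)
	if block == nil {
		return fmt.Errorf("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		return err
	}
	if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
		return fmt.Errorf("x509: certificate has expired or is not yet valid: current time %s is after %s",
			now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	}
	return nil
}

func main() {
	// Hypothetical path; the log does not say where the webhook cert lives.
	pemBytes, err := os.ReadFile("/tmp/webhook-serving.crt")
	if err != nil {
		fmt.Println("read:", err)
		return
	}
	if err := checkValidity(pemBytes, time.Now()); err != nil {
		fmt.Println(err)
	}
}
```

This also explains the records that follow: the kubelet immediately re-attempts the same patch (at 10:44:19.483 and again at 10:44:19.503/.520) and hits the identical expired-certificate error each time.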
event="NodeHasNoDiskPressure" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.470996 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.471014 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.471029 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:19Z","lastTransitionTime":"2026-01-26T10:44:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:19 crc kubenswrapper[5003]: E0126 10:44:19.483325 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:19Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.488376 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.488522 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.488552 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.488584 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.488608 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:19Z","lastTransitionTime":"2026-01-26T10:44:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:19 crc kubenswrapper[5003]: E0126 10:44:19.503945 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:19Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.507767 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.507793 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.507804 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.507820 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.507830 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:19Z","lastTransitionTime":"2026-01-26T10:44:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:19 crc kubenswrapper[5003]: E0126 10:44:19.520227 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:19Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.523761 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.523786 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.523793 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.523805 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.523814 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:19Z","lastTransitionTime":"2026-01-26T10:44:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:19 crc kubenswrapper[5003]: E0126 10:44:19.536259 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:19Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:19 crc kubenswrapper[5003]: E0126 10:44:19.536429 5003 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.537977 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
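Every status-patch retry above fails for the same root cause: the serving certificate behind the node.network-node-identity.openshift.io webhook expired on 2025-08-24, while the node clock reads 2026-01-26, so the TLS handshake is rejected before the patch is ever evaluated. A minimal Go sketch of the validity-window check that produces this x509 error; the PEM path is hypothetical and assumes the certificate was first exported from the webhook endpoint (e.g. with openssl s_client against 127.0.0.1:9743):

    // certcheck.go - a minimal sketch, not part of the kubelet; assumes the
    // webhook's serving certificate was saved to the hypothetical path below.
    package main

    import (
    	"crypto/x509"
    	"encoding/pem"
    	"fmt"
    	"log"
    	"os"
    	"time"
    )

    func main() {
    	raw, err := os.ReadFile("/tmp/webhook.pem") // hypothetical path
    	if err != nil {
    		log.Fatal(err)
    	}
    	block, _ := pem.Decode(raw)
    	if block == nil {
    		log.Fatal("no PEM block found")
    	}
    	cert, err := x509.ParseCertificate(block.Bytes)
    	if err != nil {
    		log.Fatal(err)
    	}
    	// The same validity-window comparison the TLS handshake performs,
    	// which yields "certificate has expired or is not yet valid".
    	now := time.Now()
    	switch {
    	case now.After(cert.NotAfter):
    		fmt.Printf("expired: current time %s is after %s\n",
    			now.UTC().Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
    	case now.Before(cert.NotBefore):
    		fmt.Printf("not yet valid until %s\n", cert.NotBefore.UTC().Format(time.RFC3339))
    	default:
    		fmt.Println("certificate is within its validity window")
    	}
    }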
event="NodeHasSufficientMemory" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.538005 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.538013 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.538037 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.538051 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:19Z","lastTransitionTime":"2026-01-26T10:44:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.640696 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.640773 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.640798 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.640827 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.640849 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:19Z","lastTransitionTime":"2026-01-26T10:44:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.744233 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.744294 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.744309 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.744327 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.744338 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:19Z","lastTransitionTime":"2026-01-26T10:44:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.846971 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.847008 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.847021 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.847037 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.847049 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:19Z","lastTransitionTime":"2026-01-26T10:44:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.949549 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.949587 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.949595 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.949611 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.949620 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:19Z","lastTransitionTime":"2026-01-26T10:44:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:19 crc kubenswrapper[5003]: I0126 10:44:19.995452 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 10:41:36.10864973 +0000 UTC Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.000728 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.000735 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:44:20 crc kubenswrapper[5003]: E0126 10:44:20.001024 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
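The recurring NetworkReady=false condition and the "No sandbox for pod can be found" entries share one cause: the CNI configuration directory is empty, so the runtime reports the network as not ready and new pod sandboxes cannot be wired up. A rough sketch of that probe, under the assumption that the readiness check amounts to scanning the directory for config files (the real scan lives in the container runtime's libcni code; this only reproduces the observable condition):

    // cnicheck.go - a rough sketch of the check behind the message
    // "no CNI configuration file in /etc/kubernetes/cni/net.d/".
    package main

    import (
    	"fmt"
    	"os"
    	"path/filepath"
    )

    func main() {
    	confDir := "/etc/kubernetes/cni/net.d" // directory named in the log
    	var found []string
    	for _, pattern := range []string{"*.conf", "*.conflist", "*.json"} {
    		matches, err := filepath.Glob(filepath.Join(confDir, pattern))
    		if err != nil {
    			continue // cannot happen with the literal patterns above
    		}
    		found = append(found, matches...)
    	}
    	if len(found) == 0 {
    		fmt.Println("no CNI configuration file - network plugin not ready")
    		os.Exit(1)
    	}
    	for _, f := range found {
    		fmt.Println("candidate CNI config:", f)
    	}
    }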
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.000796 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:44:20 crc kubenswrapper[5003]: E0126 10:44:20.001880 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:44:20 crc kubenswrapper[5003]: E0126 10:44:20.002175 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.052665 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.052724 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.052740 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.052762 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.052778 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:20Z","lastTransitionTime":"2026-01-26T10:44:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.155538 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.155584 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.155596 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.155612 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.155623 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:20Z","lastTransitionTime":"2026-01-26T10:44:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.258366 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.258422 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.258439 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.258461 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.258478 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:20Z","lastTransitionTime":"2026-01-26T10:44:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.361203 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.361309 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.361337 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.361367 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.361388 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:20Z","lastTransitionTime":"2026-01-26T10:44:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
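The patch the kubelet keeps retrying is a strategic merge patch over status.conditions, keyed on each condition's type (that is what the $setElementOrder/conditions directive in the failed payload pins down, so the apiserver can merge each condition individually). A self-contained sketch of the condition object being serialized; plain structs stand in for the real k8s.io/api/core/v1 types, and the field names mirror the JSON in the log:

    // conditions.go - illustrative only; mirrors the Ready condition seen
    // in the failed status patch above.
    package main

    import (
    	"encoding/json"
    	"fmt"
    	"time"
    )

    type NodeCondition struct {
    	Type               string `json:"type"`
    	Status             string `json:"status"`
    	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
    	LastTransitionTime string `json:"lastTransitionTime"`
    	Reason             string `json:"reason"`
    	Message            string `json:"message"`
    }

    func main() {
    	now := time.Now().UTC().Format(time.RFC3339)
    	ready := NodeCondition{
    		Type:               "Ready",
    		Status:             "False",
    		LastHeartbeatTime:  now,
    		LastTransitionTime: now,
    		Reason:             "KubeletNotReady",
    		Message:            "container runtime network not ready: NetworkReady=false",
    	}
    	out, _ := json.MarshalIndent(ready, "", "  ")
    	fmt.Println(string(out))
    }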
Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.464576 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.464644 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.464659 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.464682 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.464701 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:20Z","lastTransitionTime":"2026-01-26T10:44:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.569310 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.569374 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.569391 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.569414 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.569435 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:20Z","lastTransitionTime":"2026-01-26T10:44:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.672051 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.672316 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.672449 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.672551 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.672654 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:20Z","lastTransitionTime":"2026-01-26T10:44:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.775230 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.775589 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.775707 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.775830 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.775995 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:20Z","lastTransitionTime":"2026-01-26T10:44:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.879617 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.879656 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.879675 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.879695 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.879707 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:20Z","lastTransitionTime":"2026-01-26T10:44:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.982153 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.982186 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.982196 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.982210 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.982221 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:20Z","lastTransitionTime":"2026-01-26T10:44:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:20 crc kubenswrapper[5003]: I0126 10:44:20.995990 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 16:07:57.303285558 +0000 UTC Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.002190 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.002415 5003 scope.go:117] "RemoveContainer" containerID="38a9363b7cc831a8e7ccde1ea6ed7a5022d00de2d620f6c504382ff8647293c6" Jan 26 10:44:21 crc kubenswrapper[5003]: E0126 10:44:21.002439 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.085403 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.085726 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.085814 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.085952 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.086038 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:21Z","lastTransitionTime":"2026-01-26T10:44:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.189000 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.189038 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.189048 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.189062 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.189073 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:21Z","lastTransitionTime":"2026-01-26T10:44:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
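The certificate_manager lines are worth reading closely: the kubelet-serving certificate is still valid until 2026-02-24, but the jittered rotation deadline recomputed on each pass (2026-01-12, then 2025-12-28, then 2025-11-22) already lies in the past, so rotation is overdue and is attempted immediately. A sketch of how such a deadline can be derived; the actual logic lives in k8s.io/client-go/util/certificate, and both the 70-90% jitter window and the one-year lifetime here are assumptions, not values read from this cluster:

    // rotation.go - illustrative only; derives a jittered rotation deadline
    // in the style of client-go's certificate manager.
    package main

    import (
    	"fmt"
    	"math/rand"
    	"time"
    )

    func rotationDeadline(notBefore, notAfter time.Time) time.Time {
    	lifetime := notAfter.Sub(notBefore)
    	// Pick a random point between 70% and 90% of the certificate
    	// lifetime, which is why the logged deadline changes on every pass.
    	jittered := time.Duration(float64(lifetime) * (0.7 + 0.2*rand.Float64()))
    	return notBefore.Add(jittered)
    }

    func main() {
    	notAfter, _ := time.Parse(time.RFC3339, "2026-02-24T05:53:03Z") // from the log
    	notBefore := notAfter.AddDate(-1, 0, 0)                         // assumed lifetime
    	deadline := rotationDeadline(notBefore, notAfter)
    	fmt.Printf("rotation deadline %s (expiration %s)\n", deadline.UTC(), notAfter.UTC())
    	if time.Now().After(deadline) {
    		fmt.Println("deadline already passed - certificate should rotate now")
    	}
    }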
Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.292267 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.292394 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.292417 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.292446 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.292463 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:21Z","lastTransitionTime":"2026-01-26T10:44:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.394385 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.394444 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.394457 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.394474 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.394487 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:21Z","lastTransitionTime":"2026-01-26T10:44:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.498138 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.498194 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.498211 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.498238 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.498254 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:21Z","lastTransitionTime":"2026-01-26T10:44:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.602574 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.602648 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.602669 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.602694 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.602712 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:21Z","lastTransitionTime":"2026-01-26T10:44:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.706118 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.706170 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.706182 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.706197 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.706208 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:21Z","lastTransitionTime":"2026-01-26T10:44:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.807929 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.807970 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.807979 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.807992 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.808003 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:21Z","lastTransitionTime":"2026-01-26T10:44:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.910276 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.910331 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.910342 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.910358 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.910369 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:21Z","lastTransitionTime":"2026-01-26T10:44:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:21 crc kubenswrapper[5003]: I0126 10:44:21.997206 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 08:08:33.043030496 +0000 UTC Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.001534 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.001550 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.001645 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:44:22 crc kubenswrapper[5003]: E0126 10:44:22.001790 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:44:22 crc kubenswrapper[5003]: E0126 10:44:22.001928 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:44:22 crc kubenswrapper[5003]: E0126 10:44:22.001974 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.012236 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.012264 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.012272 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.012299 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.012308 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:22Z","lastTransitionTime":"2026-01-26T10:44:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.115165 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.115212 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.115222 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.115236 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.115247 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:22Z","lastTransitionTime":"2026-01-26T10:44:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.218046 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.218093 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.218107 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.218125 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.218139 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:22Z","lastTransitionTime":"2026-01-26T10:44:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.321390 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.321436 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.321450 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.321468 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.321479 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:22Z","lastTransitionTime":"2026-01-26T10:44:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.398075 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q24zl_f9a98683-f9ac-45d4-9312-43ebf25bdb52/ovnkube-controller/2.log" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.401576 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" event={"ID":"f9a98683-f9ac-45d4-9312-43ebf25bdb52","Type":"ContainerStarted","Data":"2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9"} Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.402372 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.422693 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a177291
20448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:22Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.424213 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.424304 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.424325 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.424354 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.424370 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:22Z","lastTransitionTime":"2026-01-26T10:44:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.441501 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef6c99048be2d0cfc98ef20b8559ceaf44491ffebbc5a8f363117a1c7a0b3c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:22Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.460337 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:22Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.473700 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eac8a62-4b57-4423-b3b6-5d62047182f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e29a3746f285efd57f60fafc32c5842c9b6509264e0c3385c5e1b5600231f047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98b5af77054fb64a506432cbe6801e93888383a6288532432b7217b7b76c5cea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s4f8s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:22Z is after 2025-08-24T17:21:41Z" Jan 26 
10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.485414 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4jrnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4jrnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:22Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.508508 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021
ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:22Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.525983 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:22Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.526714 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.526765 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.526816 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.526844 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.526864 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:22Z","lastTransitionTime":"2026-01-26T10:44:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.542121 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:22Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.555423 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:22Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.568949 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:22Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.580474 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xfz4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81a673ac45c8bce5e09aa400e29966d2fa127d94716fbcbb4601193bd912e66d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j74r8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xfz4f\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:22Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.605884 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33840
af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38a9363b7cc831a8e7ccde1ea6ed7a5022d00de2d620f6c504382ff8647293c6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:43:54Z\\\",\\\"message\\\":\\\"netes/ovnkube-control-plane-749d76644c-s4f8s\\\\nI0126 10:43:54.165445 6637 lb_config.go:1031] Cluster endpoints for openshift-machine-api/cluster-autoscaler-operator for network=default are: map[]\\\\nI0126 10:43:54.165459 6637 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s in node crc\\\\nI0126 10:43:54.165473 6637 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s after 0 failed attempt(s)\\\\nI0126 10:43:54.165485 6637 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s\\\\nI0126 10:43:54.165486 6637 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nI0126 10:43:54.165398 6637 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0126 10:43:54.165472 6637 services_controller.go:443] Built service openshift-machine-api/cluster-autoscaler-operator LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.245\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, 
exte\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:44:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"c
ontainerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:22Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.621385 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5fbe8df-fa5b-47cb-ae08-30e7d8de3a8d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a3c9c6bc10414e0ee33459ed2d373ebd6153bc455c01fb47ca394b17695acdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d7272571d0c28d942b9bd1fad1759a627cc5e39e2b7ee804863f097855e697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7d7272571d0c28d942b9bd1fad1759a627cc5e39e2b7ee804863f097855e697\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:22Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.629092 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.629137 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.629147 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.629162 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.629172 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:22Z","lastTransitionTime":"2026-01-26T10:44:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.636894 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2977ac6-0892-467f-b265-16c14d9c63a7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d73b095e2001015587e06ebb41c1460ef946ddc2dd47f2c893ee82fe6353370b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://485f8dd99739834234edd418f9fc2fa8a50c854bf08a088fafb8ac6814dca1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"nam
e\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2a532ad0f92d0c2284fcd0c52cac0b5187dacaccccf2fc361fab4173436c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c0f56e5d83ce7fcbde9c0503292ed1f242b2c1f535b471974ca3662ba6933e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c0f56e5d83ce7fcbde9c0503292ed1f242b2c1f535b471974ca3662ba6933e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:22Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.651517 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:22Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.666122 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:22Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.688232 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f74daac1b5987191a8aec42f28dc59180aaa2033b31d0c4341b0938e473fe2f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:22Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.701228 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://385aa2f7573bd1359a2745d96b174f9df281bd6c5d661a0dfb2e77084cfa011f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:44:12Z\\\",\\\"message\\\":\\\"2026-01-26T10:43:26+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_07bfdeb5-2f91-4361-a75a-4b768c1ada25\\\\n2026-01-26T10:43:27+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_07bfdeb5-2f91-4361-a75a-4b768c1ada25 to /host/opt/cni/bin/\\\\n2026-01-26T10:43:27Z [verbose] multus-daemon started\\\\n2026-01-26T10:43:27Z [verbose] Readiness Indicator file check\\\\n2026-01-26T10:44:12Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:44:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:22Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.716665 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:22Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.731700 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.731748 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.731759 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.731778 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 
10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.731789 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:22Z","lastTransitionTime":"2026-01-26T10:44:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.834324 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.834364 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.834375 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.834390 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.834405 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:22Z","lastTransitionTime":"2026-01-26T10:44:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.936853 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.936924 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.936962 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.936992 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.937012 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:22Z","lastTransitionTime":"2026-01-26T10:44:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:22 crc kubenswrapper[5003]: I0126 10:44:22.998084 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-08 06:58:59.433533688 +0000 UTC
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.001504 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq"
Jan 26 10:44:23 crc kubenswrapper[5003]: E0126 10:44:23.001689 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60"
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.039467 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.039531 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.039545 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.039563 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.039576 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:23Z","lastTransitionTime":"2026-01-26T10:44:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.142503 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.142561 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.142579 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.142603 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.142620 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:23Z","lastTransitionTime":"2026-01-26T10:44:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.244713 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.244749 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.244758 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.244774 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.244785 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:23Z","lastTransitionTime":"2026-01-26T10:44:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.347092 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.347143 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.347187 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.347208 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.347223 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:23Z","lastTransitionTime":"2026-01-26T10:44:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.407568 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q24zl_f9a98683-f9ac-45d4-9312-43ebf25bdb52/ovnkube-controller/3.log"
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.408257 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q24zl_f9a98683-f9ac-45d4-9312-43ebf25bdb52/ovnkube-controller/2.log"
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.411584 5003 generic.go:334] "Generic (PLEG): container finished" podID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerID="2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9" exitCode=1
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.411620 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" event={"ID":"f9a98683-f9ac-45d4-9312-43ebf25bdb52","Type":"ContainerDied","Data":"2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9"}
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.411654 5003 scope.go:117] "RemoveContainer" containerID="38a9363b7cc831a8e7ccde1ea6ed7a5022d00de2d620f6c504382ff8647293c6"
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.412321 5003 scope.go:117] "RemoveContainer" containerID="2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9"
Jan 26 10:44:23 crc kubenswrapper[5003]: E0126 10:44:23.412505 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-q24zl_openshift-ovn-kubernetes(f9a98683-f9ac-45d4-9312-43ebf25bdb52)\"" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52"
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.430905 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021
ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:23Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.443639 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:23Z is after 2025-08-24T17:21:41Z"
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.449721 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.449765 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.449776 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.449803 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.449812 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:23Z","lastTransitionTime":"2026-01-26T10:44:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.456321 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted.
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:23Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.467475 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:23Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.480175 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:23Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.490619 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xfz4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81a673ac45c8bce5e09aa400e29966d2fa127d94716fbcbb4601193bd912e66d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j74r8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xfz4f\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:23Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.501849 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5fbe8df-fa5b-47cb-ae08-30e7d8de3a8d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a3c9c6bc10414e0ee33459ed2d373ebd6153bc455c01fb47ca394b17695acdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d7272571d0c28d942b9bd1fad1759a627cc5e39e2b7ee804863f097855e697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7d7272571d0c28d942b9bd1fad1759a627cc5e39e2b7ee804863f097855e697\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:23Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.513598 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2977ac6-0892-467f-b265-16c14d9c63a7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d73b095e2001015587e06ebb41c1460ef946ddc2dd47f2c893ee82fe6353370b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://485f8dd99739834234edd418f9fc2fa8a50c854bf08a088fafb8ac6814dca1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2a532ad0f92d0c2284fcd0c52cac0b5187dacaccccf2fc361fab4173436c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\"
:\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c0f56e5d83ce7fcbde9c0503292ed1f242b2c1f535b471974ca3662ba6933e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c0f56e5d83ce7fcbde9c0503292ed1f242b2c1f535b471974ca3662ba6933e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:23Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.524548 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:23Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.553196 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.553242 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.553253 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.553270 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.553302 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:23Z","lastTransitionTime":"2026-01-26T10:44:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.554991 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:23Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.585967 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f74daac1b5987191a8aec42f28dc59180aaa2033b31d0c4341b0938e473fe2f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"reaso
n\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-26T10:44:23Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.601014 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://385aa2f7573bd1359a2745d96b174f9df281bd6c5d661a0dfb2e77084cfa011f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:44:12Z\\\",\\\"message\\\":\\\"2026-01-26T10:43:26+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_07bfdeb5-2f91-4361-a75a-4b768c1ada25\\\\n2026-01-26T10:43:27+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_07bfdeb5-2f91-4361-a75a-4b768c1ada25 to /host/opt/cni/bin/\\\\n2026-01-26T10:43:27Z [verbose] multus-daemon started\\\\n2026-01-26T10:43:27Z [verbose] Readiness Indicator file check\\\\n2026-01-26T10:44:12Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:44:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:23Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.618398 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38a9363b7cc831a8e7ccde1ea6ed7a5022d00de2d620f6c504382ff8647293c6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:43:54Z\\\",\\\"message\\\":\\\"netes/ovnkube-control-plane-749d76644c-s4f8s\\\\nI0126 10:43:54.165445 6637 lb_config.go:1031] Cluster endpoints for openshift-machine-api/cluster-autoscaler-operator for network=default are: map[]\\\\nI0126 10:43:54.165459 6637 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s in node crc\\\\nI0126 10:43:54.165473 6637 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s after 0 failed attempt(s)\\\\nI0126 10:43:54.165485 6637 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s\\\\nI0126 10:43:54.165486 6637 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nI0126 10:43:54.165398 6637 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0126 10:43:54.165472 6637 services_controller.go:443] Built service openshift-machine-api/cluster-autoscaler-operator LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.245\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, exte\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:44:22Z\\\",\\\"message\\\":\\\"erator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.183\\\\\\\", Port:9001, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0126 10:44:22.293447 7055 services_controller.go:452] Built service openshift-machine-config-operator/machine-config-operator per-node LB for network=default: []services.LB{}\\\\nI0126 10:44:22.293456 7055 services_controller.go:453] Built service openshift-machine-config-operator/machine-config-operator template LB for network=default: []services.LB{}\\\\nI0126 10:44:22.293464 7055 services_controller.go:454] Service 
openshift-machine-config-operator/machine-config-operator for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nI0126 10:44:22.293443 7055 services_controller.go:451] Built service openshift-kube-storage-version-migrator-operator/metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-storage-version-migrator-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:44:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log
\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:23Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.630948 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:23Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.645896 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:23Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.659246 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.659373 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.659397 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.659419 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.659431 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:23Z","lastTransitionTime":"2026-01-26T10:44:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.659848 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef6c99048be2d0cfc98ef20b8559ceaf44491ffebbc5a8f363117a1c7a0b3c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:23Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.672497 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:23Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.687503 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eac8a62-4b57-4423-b3b6-5d62047182f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e29a3746f285efd57f60fafc32c5842c9b6509264e0c3385c5e1b5600231f047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98b5af77054fb64a506432cbe6801e93888383a6288532432b7217b7b76c5cea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s4f8s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:23Z is after 2025-08-24T17:21:41Z" Jan 26 
10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.698825 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4jrnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4jrnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:23Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.762683 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.762749 5003 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.762763 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.762778 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.762790 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:23Z","lastTransitionTime":"2026-01-26T10:44:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.864954 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.865025 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.865036 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.865050 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.865065 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:23Z","lastTransitionTime":"2026-01-26T10:44:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.967780 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.967842 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.967860 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.967888 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.967906 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:23Z","lastTransitionTime":"2026-01-26T10:44:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:23 crc kubenswrapper[5003]: I0126 10:44:23.998702 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 14:14:20.649055431 +0000 UTC Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.001018 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.001108 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.001145 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:44:24 crc kubenswrapper[5003]: E0126 10:44:24.001353 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:44:24 crc kubenswrapper[5003]: E0126 10:44:24.001546 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:44:24 crc kubenswrapper[5003]: E0126 10:44:24.001714 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.070441 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.070510 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.070538 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.070563 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.070584 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:24Z","lastTransitionTime":"2026-01-26T10:44:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.173531 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.173584 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.173601 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.173623 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.173641 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:24Z","lastTransitionTime":"2026-01-26T10:44:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.275612 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.275870 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.275966 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.276040 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.276123 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:24Z","lastTransitionTime":"2026-01-26T10:44:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.378066 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.378324 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.378386 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.378450 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.378513 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:24Z","lastTransitionTime":"2026-01-26T10:44:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.416729 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q24zl_f9a98683-f9ac-45d4-9312-43ebf25bdb52/ovnkube-controller/3.log" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.419828 5003 scope.go:117] "RemoveContainer" containerID="2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9" Jan 26 10:44:24 crc kubenswrapper[5003]: E0126 10:44:24.419971 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-q24zl_openshift-ovn-kubernetes(f9a98683-f9ac-45d4-9312-43ebf25bdb52)\"" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.436447 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":
\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:24Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.446420 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef6c99048be2d0cfc98ef20b8559ceaf44491ffebbc5a8f363117a1c7a0b3c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:24Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.458074 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:24Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.467233 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eac8a62-4b57-4423-b3b6-5d62047182f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e29a3746f285efd57f60fafc32c5842c9b6509264e0c3385c5e1b5600231f047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98b5af77054fb64a506432cbe6801e93888383a6288532432b7217b7b76c5cea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s4f8s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:24Z is after 2025-08-24T17:21:41Z" Jan 26 
10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.476724 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4jrnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4jrnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:24Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.480341 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.480380 5003 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.480391 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.480405 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.480416 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:24Z","lastTransitionTime":"2026-01-26T10:44:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.501569 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernet
es/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"r
eason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:24Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.516703 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:24Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.528832 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:24Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.543490 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:24Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.556649 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:24Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.567062 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xfz4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81a673ac45c8bce5e09aa400e29966d2fa127d94716fbcbb4601193bd912e66d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j74r8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xfz4f\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:24Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.576642 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5fbe8df-fa5b-47cb-ae08-30e7d8de3a8d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a3c9c6bc10414e0ee33459ed2d373ebd6153bc455c01fb47ca394b17695acdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d7272571d0c28d942b9bd1fad1759a627cc5e39e2b7ee804863f097855e697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7d7272571d0c28d942b9bd1fad1759a627cc5e39e2b7ee804863f097855e697\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:24Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.582589 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.582634 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.582647 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.582663 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.582675 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:24Z","lastTransitionTime":"2026-01-26T10:44:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.586794 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2977ac6-0892-467f-b265-16c14d9c63a7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d73b095e2001015587e06ebb41c1460ef946ddc2dd47f2c893ee82fe6353370b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://485f8dd99739834234edd418f9fc2fa8a50c854bf08a088fafb8ac6814dca1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/opens
hift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2a532ad0f92d0c2284fcd0c52cac0b5187dacaccccf2fc361fab4173436c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c0f56e5d83ce7fcbde9c0503292ed1f242b2c1f535b471974ca3662ba6933e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c0f56e5d83ce7fcbde9c0503292ed1f242b2c1f535b471974ca3662ba6933e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:24Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.597160 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:24Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.609199 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:24Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.625152 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f74daac1b5987191a8aec42f28dc59180aaa2033b31d0c4341b0938e473fe2f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:24Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.640753 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://385aa2f7573bd1359a2745d96b174f9df281bd6c5d661a0dfb2e77084cfa011f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:44:12Z\\\",\\\"message\\\":\\\"2026-01-26T10:43:26+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_07bfdeb5-2f91-4361-a75a-4b768c1ada25\\\\n2026-01-26T10:43:27+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_07bfdeb5-2f91-4361-a75a-4b768c1ada25 to /host/opt/cni/bin/\\\\n2026-01-26T10:43:27Z [verbose] multus-daemon started\\\\n2026-01-26T10:43:27Z [verbose] Readiness Indicator file check\\\\n2026-01-26T10:44:12Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:44:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:24Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.660454 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:44:22Z\\\",\\\"message\\\":\\\"erator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.183\\\\\\\", Port:9001, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0126 10:44:22.293447 7055 services_controller.go:452] Built service openshift-machine-config-operator/machine-config-operator per-node LB for network=default: []services.LB{}\\\\nI0126 10:44:22.293456 7055 services_controller.go:453] Built service openshift-machine-config-operator/machine-config-operator template LB for network=default: []services.LB{}\\\\nI0126 10:44:22.293464 7055 services_controller.go:454] Service openshift-machine-config-operator/machine-config-operator for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nI0126 10:44:22.293443 7055 services_controller.go:451] Built service openshift-kube-storage-version-migrator-operator/metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-storage-version-migrator-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:44:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-q24zl_openshift-ovn-kubernetes(f9a98683-f9ac-45d4-9312-43ebf25bdb52)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:24Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.672685 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,
\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:24Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.684679 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.684711 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.684720 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.684735 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.684744 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:24Z","lastTransitionTime":"2026-01-26T10:44:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.786922 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.786971 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.786983 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.787001 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.787029 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:24Z","lastTransitionTime":"2026-01-26T10:44:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.889790 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.889845 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.889855 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.889875 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.889889 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:24Z","lastTransitionTime":"2026-01-26T10:44:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.992214 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.992616 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.992722 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.992833 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.992951 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:24Z","lastTransitionTime":"2026-01-26T10:44:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:24 crc kubenswrapper[5003]: I0126 10:44:24.998853 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 04:00:45.829604764 +0000 UTC Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.001088 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:44:25 crc kubenswrapper[5003]: E0126 10:44:25.001544 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.020155 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2977ac6-0892-467f-b265-16c14d9c63a7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d73b095e2001015587e06ebb41c1460ef946ddc2dd47f2c893ee82fe6353370b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://485f8dd99739834234edd418f9fc2fa8a50c854bf08a088fafb8ac6814dca1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2a532ad0f92d0c2284fcd0c52cac0b5187dacaccccf2fc361fab4173436c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\
\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c0f56e5d83ce7fcbde9c0503292ed1f242b2c1f535b471974ca3662ba6933e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c0f56e5d83ce7fcbde9c0503292ed1f242b2c1f535b471974ca3662ba6933e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.034047 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.046336 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\
\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.060346 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f74daac1b5987191a8aec42f28dc59180aaa2033b31d0c4341b0938e473fe2f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"exitCode\
\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"
recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.074735 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://385aa2f7573bd1359a2745d96b174f9df281bd6c5d661a0dfb2e77084cfa011f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:44:12Z\\\",\\\"message\\\":\\\"2026-01-26T10:43:26+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_07bfdeb5-2f91-4361-a75a-4b768c1ada25\\\\n2026-01-26T10:43:27+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_07bfdeb5-2f91-4361-a75a-4b768c1ada25 to /host/opt/cni/bin/\\\\n2026-01-26T10:43:27Z [verbose] multus-daemon started\\\\n2026-01-26T10:43:27Z [verbose] Readiness Indicator file check\\\\n2026-01-26T10:44:12Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:44:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.093111 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:44:22Z\\\",\\\"message\\\":\\\"erator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.183\\\\\\\", Port:9001, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0126 10:44:22.293447 7055 services_controller.go:452] Built service openshift-machine-config-operator/machine-config-operator per-node LB for network=default: []services.LB{}\\\\nI0126 10:44:22.293456 7055 services_controller.go:453] Built service openshift-machine-config-operator/machine-config-operator template LB for network=default: []services.LB{}\\\\nI0126 10:44:22.293464 7055 services_controller.go:454] Service openshift-machine-config-operator/machine-config-operator for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nI0126 10:44:22.293443 7055 services_controller.go:451] Built service openshift-kube-storage-version-migrator-operator/metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-storage-version-migrator-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:44:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-q24zl_openshift-ovn-kubernetes(f9a98683-f9ac-45d4-9312-43ebf25bdb52)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.094672 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.094720 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.094733 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.094753 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.094766 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:25Z","lastTransitionTime":"2026-01-26T10:44:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.102745 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5fbe8df-fa5b-47cb-ae08-30e7d8de3a8d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a3c9c6bc10414e0ee33459ed2d373ebd6153bc455c01fb47ca394b17695acdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d7272571d0c28d942b9bd1fad1759a627cc5e39e2b7ee804863f097855e697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7d7272571d0c28d942b9bd1fad1759a627cc5e39e2b7ee804863f097855e697\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.116948 5003 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86e
f140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.126767 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef6c99048be2d0cfc98ef20b8559ceaf44491ffebbc5a8f363117a1c7a0b3c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.141138 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.150986 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eac8a62-4b57-4423-b3b6-5d62047182f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e29a3746f285efd57f60fafc32c5842c9b6509264e0c3385c5e1b5600231f047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98b5af77054fb64a506432cbe6801e93888383a6288532432b7217b7b76c5cea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s4f8s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:25Z is after 2025-08-24T17:21:41Z" Jan 26 
10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.161727 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4jrnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4jrnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.172791 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.184990 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.197488 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.197558 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.197573 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.197592 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.197603 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:25Z","lastTransitionTime":"2026-01-26T10:44:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.198321 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.210427 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.224275 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.237092 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xfz4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81a673ac45c8bce5e09aa400e29966d2fa127d94716fbcbb4601193bd912e66d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j74r8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xfz4f\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.255308 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:25Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.300927 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.300969 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.300978 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.300994 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.301027 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:25Z","lastTransitionTime":"2026-01-26T10:44:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.403591 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.403912 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.403925 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.403943 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.403956 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:25Z","lastTransitionTime":"2026-01-26T10:44:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.506412 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.506450 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.506458 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.506471 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.506480 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:25Z","lastTransitionTime":"2026-01-26T10:44:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.609124 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.609395 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.609411 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.609429 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.609441 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:25Z","lastTransitionTime":"2026-01-26T10:44:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
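Every "Failed to update status for pod" entry above is rejected for the same reason: the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 presents a serving certificate that expired 2025-08-24T17:21:41Z, while the node clock reads 2026-01-26. A minimal Go probe to confirm what that endpoint is actually serving is sketched below; the address and expected dates come from the log, everything else is illustrative.

```go
// certprobe.go - minimal sketch: dial a TLS endpoint and print the
// validity window of the certificate it presents. Verification is
// skipped on purpose: an expired certificate would otherwise abort
// the handshake before we could inspect it.
package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		InsecureSkipVerify: true, // diagnostic only: inspect, don't trust
	})
	if err != nil {
		log.Fatalf("handshake failed: %v", err)
	}
	defer conn.Close()

	now := time.Now()
	for i, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("cert[%d] subject=%q notBefore=%s notAfter=%s expired=%t\n",
			i, cert.Subject.CommonName,
			cert.NotBefore.Format(time.RFC3339),
			cert.NotAfter.Format(time.RFC3339),
			now.After(cert.NotAfter))
	}
}
```

Run against the state captured above, this should report notAfter=2025-08-24T17:21:41Z and expired=true for the leaf certificate.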
Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.710984 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.711016 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.711025 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.711041 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.711051 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:25Z","lastTransitionTime":"2026-01-26T10:44:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.813567 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.813608 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.813616 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.813629 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.813638 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:25Z","lastTransitionTime":"2026-01-26T10:44:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.915964 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.916055 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.916074 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.916102 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.916120 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:25Z","lastTransitionTime":"2026-01-26T10:44:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:25 crc kubenswrapper[5003]: I0126 10:44:25.999699 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 13:51:38.087848746 +0000 UTC
Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.000949 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 26 10:44:26 crc kubenswrapper[5003]: E0126 10:44:26.001090 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.001188 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 26 10:44:26 crc kubenswrapper[5003]: E0126 10:44:26.001443 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.001790 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 26 10:44:26 crc kubenswrapper[5003]: E0126 10:44:26.001926 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.018870 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.018905 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.018918 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.018941 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.018963 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:26Z","lastTransitionTime":"2026-01-26T10:44:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
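The certificate_manager.go lines concern a different certificate: the kubelet's own serving cert, still valid until 2026-02-24, but whose rotation deadline (2025-11-09 here, 2025-11-25 and 2025-11-20 on later attempts) already lies in the past, so a fresh randomized deadline is drawn on every sync. client-go picks that deadline as a jittered fraction of the certificate lifetime, roughly the 70-90% band; the sketch below reproduces the arithmetic under that assumption (the band is an assumption about the library, not something this log states).

```go
// rotation.go - sketch of how a jittered rotation deadline lands well
// before notAfter. Assumption: the deadline is drawn uniformly from
// 70%..90% of the cert lifetime, which is consistent with the November
// 2025 deadlines logged for a cert expiring 2026-02-24.
package main

import (
	"fmt"
	"math/rand"
	"time"
)

func rotationDeadline(notBefore, notAfter time.Time, r *rand.Rand) time.Time {
	lifetime := notAfter.Sub(notBefore)
	frac := 0.7 + 0.2*r.Float64() // uniform in [0.7, 0.9)
	return notBefore.Add(time.Duration(frac * float64(lifetime)))
}

func main() {
	// notAfter comes from the log; notBefore is assumed to be one year earlier.
	notAfter := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC)
	notBefore := notAfter.AddDate(-1, 0, 0)
	r := rand.New(rand.NewSource(1))
	for i := 0; i < 3; i++ {
		fmt.Println(rotationDeadline(notBefore, notAfter, r))
	}
}
```

With a one-year lifetime the draws land between early November 2025 and mid January 2026, matching the deadlines above; since all of them are earlier than the node's current time, rotation is due immediately, which is why the line repeats on every sync.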
Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.121083 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.121140 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.121155 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.121175 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.121189 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:26Z","lastTransitionTime":"2026-01-26T10:44:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.224381 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.224439 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.224450 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.224470 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.224485 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:26Z","lastTransitionTime":"2026-01-26T10:44:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.327527 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.327586 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.327597 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.327620 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.327636 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:26Z","lastTransitionTime":"2026-01-26T10:44:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.430703 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.430768 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.430778 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.430800 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.430816 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:26Z","lastTransitionTime":"2026-01-26T10:44:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.533561 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.533622 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.533640 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.533663 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.533680 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:26Z","lastTransitionTime":"2026-01-26T10:44:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.636525 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.636590 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.636598 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.636617 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.636626 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:26Z","lastTransitionTime":"2026-01-26T10:44:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.740046 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.740097 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.740110 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.740135 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.740151 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:26Z","lastTransitionTime":"2026-01-26T10:44:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.842617 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.842686 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.842703 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.842730 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.842748 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:26Z","lastTransitionTime":"2026-01-26T10:44:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.945782 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.945858 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.945895 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.945924 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:26 crc kubenswrapper[5003]: I0126 10:44:26.945947 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:26Z","lastTransitionTime":"2026-01-26T10:44:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.000914 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 11:06:44.761630769 +0000 UTC Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.001095 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:44:27 crc kubenswrapper[5003]: E0126 10:44:27.001262 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.049428 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.049488 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.049507 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.049527 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.049541 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:27Z","lastTransitionTime":"2026-01-26T10:44:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.152726 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.152785 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.152796 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.152813 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.152825 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:27Z","lastTransitionTime":"2026-01-26T10:44:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.255713 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.255753 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.255763 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.255777 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.255785 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:27Z","lastTransitionTime":"2026-01-26T10:44:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.358482 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.358524 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.358533 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.358547 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.358557 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:27Z","lastTransitionTime":"2026-01-26T10:44:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.461735 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.461806 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.461822 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.461848 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.461864 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:27Z","lastTransitionTime":"2026-01-26T10:44:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.564850 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.564919 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.564939 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.564964 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.564982 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:27Z","lastTransitionTime":"2026-01-26T10:44:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.668091 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.668148 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.668163 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.668181 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.668193 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:27Z","lastTransitionTime":"2026-01-26T10:44:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.771019 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.771076 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.771091 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.771111 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.771126 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:27Z","lastTransitionTime":"2026-01-26T10:44:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.873552 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.873601 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.873614 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.873630 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.873641 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:27Z","lastTransitionTime":"2026-01-26T10:44:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.975952 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.976022 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.976034 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.976050 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:27 crc kubenswrapper[5003]: I0126 10:44:27.976061 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:27Z","lastTransitionTime":"2026-01-26T10:44:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.000688 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.000803 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 26 10:44:28 crc kubenswrapper[5003]: E0126 10:44:28.000935 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.001135 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
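Every one of these NodeNotReady heartbeats and skipped pod syncs traces back to the same precondition: kubelet finds no CNI configuration under /etc/kubernetes/cni/net.d/ (presumably the network operator only writes one once ovn-kubernetes is up). A check equivalent to what the runtime keeps failing is sketched in Go below; the directory path comes from the log, while the accepted extensions follow the usual CNI convention and are an assumption.

```go
// cnicheck.go - sketch: report whether the CNI conf directory named in
// the log contains any network configuration the runtime could load.
// The .conf/.conflist/.json extensions are the conventional CNI set
// (an assumption, not taken from the log).
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	const dir = "/etc/kubernetes/cni/net.d"
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Printf("cannot read %s: %v\n", dir, err)
		return
	}
	found := 0
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			fmt.Println("CNI config:", filepath.Join(dir, e.Name()))
			found++
		}
	}
	if found == 0 {
		fmt.Println("no CNI configuration file found - NetworkReady stays false")
	}
}
```

As soon as this directory gains a config, NetworkReady flips to true and the queued sandbox creations above can proceed.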
Jan 26 10:44:28 crc kubenswrapper[5003]: E0126 10:44:28.001205 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 26 10:44:28 crc kubenswrapper[5003]: E0126 10:44:28.001373 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.001607 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 22:33:21.189027905 +0000 UTC
Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.074235 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.074398 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 26 10:44:28 crc kubenswrapper[5003]: E0126 10:44:28.074458 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:32.074424752 +0000 UTC m=+147.615650313 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 10:44:28 crc kubenswrapper[5003]: E0126 10:44:28.074506 5003 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Jan 26 10:44:28 crc kubenswrapper[5003]: E0126 10:44:28.074570 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 10:45:32.074553546 +0000 UTC m=+147.615779147 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
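The UnmountVolume failure is different in kind from the network errors: the kubevirt.io.hostpath-provisioner CSI driver has not re-registered with this kubelet yet, so TearDownAt cannot obtain a CSI client. Drivers announce themselves by dropping a socket into the kubelet's plugin-registration directory; listing it shows who has checked in so far. A sketch follows, assuming the default registry path /var/lib/kubelet/plugins_registry (the path is not stated in this log).

```go
// csiregs.go - sketch: list plugin-registration sockets so you can see
// whether kubevirt.io.hostpath-provisioner has re-registered. The
// registry path is the kubelet default (assumption; not in the log).
package main

import (
	"fmt"
	"os"
	"strings"
)

func main() {
	const registry = "/var/lib/kubelet/plugins_registry"
	entries, err := os.ReadDir(registry)
	if err != nil {
		fmt.Printf("cannot read %s: %v\n", registry, err)
		return
	}
	for _, e := range entries {
		fmt.Println("registered plugin socket:", e.Name())
		if strings.Contains(e.Name(), "kubevirt.io.hostpath-provisioner") {
			fmt.Println("  -> the driver the failed unmount is waiting for")
		}
	}
}
```

Until that socket appears, the teardown stays parked in the retry queue seen above.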
Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.074593 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 26 10:44:28 crc kubenswrapper[5003]: E0126 10:44:28.074684 5003 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 26 10:44:28 crc kubenswrapper[5003]: E0126 10:44:28.074717 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 10:45:32.07470841 +0000 UTC m=+147.615934051 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.078959 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.079025 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.079049 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.079078 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.079112 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:28Z","lastTransitionTime":"2026-01-26T10:44:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
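Each of these mount and unmount failures is parked for exactly 1m4s before the next attempt. That is consistent with duration-doubling backoff: from a 500ms base, the eighth consecutive failure waits 500ms x 2^7 = 64s (the 500ms base and factor of 2 are the usual defaults for the kubelet's volume operation backoff, assumed here rather than logged). The arithmetic, as a runnable sketch:

```go
// backoff.go - sketch of duration-doubling backoff reaching the 1m4s
// (64s) seen as durationBeforeRetry in the log. Base 500ms and factor 2
// are assumed defaults, not values printed by the log itself.
package main

import (
	"fmt"
	"time"
)

func main() {
	d := 500 * time.Millisecond
	for failures := 1; failures <= 8; failures++ {
		fmt.Printf("after failure %d: wait %v\n", failures, d)
		d *= 2
	}
	// failure 8 prints 1m4s, matching "durationBeforeRetry 1m4s"
}
```

Seven earlier failures therefore happened before this excerpt, which fits a node that has been retrying since boot.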
Has your network provider started?"} Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.175755 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.175825 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:44:28 crc kubenswrapper[5003]: E0126 10:44:28.175903 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 10:44:28 crc kubenswrapper[5003]: E0126 10:44:28.175903 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 10:44:28 crc kubenswrapper[5003]: E0126 10:44:28.175933 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 10:44:28 crc kubenswrapper[5003]: E0126 10:44:28.175947 5003 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 10:44:28 crc kubenswrapper[5003]: E0126 10:44:28.175997 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-26 10:45:32.175981542 +0000 UTC m=+147.717207113 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 10:44:28 crc kubenswrapper[5003]: E0126 10:44:28.175918 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 10:44:28 crc kubenswrapper[5003]: E0126 10:44:28.176015 5003 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 10:44:28 crc kubenswrapper[5003]: E0126 10:44:28.176042 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-26 10:45:32.176032403 +0000 UTC m=+147.717257954 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.180982 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.181011 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.181021 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.181034 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.181044 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:28Z","lastTransitionTime":"2026-01-26T10:44:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.283451 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.283492 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.283503 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.283520 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.283532 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:28Z","lastTransitionTime":"2026-01-26T10:44:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.385642 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.385669 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.385676 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.385689 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.385697 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:28Z","lastTransitionTime":"2026-01-26T10:44:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.491958 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.492016 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.492034 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.492062 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.492077 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:28Z","lastTransitionTime":"2026-01-26T10:44:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.594718 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.594811 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.594822 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.594851 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.594862 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:28Z","lastTransitionTime":"2026-01-26T10:44:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.697477 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.697526 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.697540 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.697560 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.697574 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:28Z","lastTransitionTime":"2026-01-26T10:44:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.800422 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.800495 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.800510 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.800526 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.800538 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:28Z","lastTransitionTime":"2026-01-26T10:44:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.903198 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.903237 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.903248 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.903265 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:28 crc kubenswrapper[5003]: I0126 10:44:28.903302 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:28Z","lastTransitionTime":"2026-01-26T10:44:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.001642 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.001725 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 04:26:49.26338745 +0000 UTC Jan 26 10:44:29 crc kubenswrapper[5003]: E0126 10:44:29.001911 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.005744 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.005789 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.005802 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.005816 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.005828 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:29Z","lastTransitionTime":"2026-01-26T10:44:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.107762 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.107804 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.107817 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.107834 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.107847 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:29Z","lastTransitionTime":"2026-01-26T10:44:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.210237 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.210310 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.210328 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.210350 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.210363 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:29Z","lastTransitionTime":"2026-01-26T10:44:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.312391 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.312424 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.312432 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.312447 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.312455 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:29Z","lastTransitionTime":"2026-01-26T10:44:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.414581 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.414642 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.414667 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.414696 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.414717 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:29Z","lastTransitionTime":"2026-01-26T10:44:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.517512 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.517564 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.517576 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.517594 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.517606 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:29Z","lastTransitionTime":"2026-01-26T10:44:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.618374 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.618440 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.618453 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.618471 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.618484 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:29Z","lastTransitionTime":"2026-01-26T10:44:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:29 crc kubenswrapper[5003]: E0126 10:44:29.630941 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:29Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.634189 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.634242 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.634256 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.634347 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.634397 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:29Z","lastTransitionTime":"2026-01-26T10:44:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:29 crc kubenswrapper[5003]: E0126 10:44:29.646959 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:29Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.651493 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.651558 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
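Every node-status patch above is rejected for the same reason: the node.network-node-identity.openshift.io webhook presents a serving certificate that expired on 2025-08-24, while the node clock reads 2026-01-26, so the kubelet keeps cycling through these identical retries until that certificate is rotated. A small arithmetic sketch of the gap, using only the two timestamps printed verbatim in the x509 error:

#!/usr/bin/env python3
# Arithmetic sketch: how stale is the webhook serving certificate? Both
# timestamps are taken verbatim from the x509 error in the log.
from datetime import datetime, timezone

not_after = datetime(2025, 8, 24, 17, 21, 41, tzinfo=timezone.utc)  # cert expiry
now = datetime(2026, 1, 26, 10, 44, 29, tzinfo=timezone.utc)        # log time

delta = now - not_after
print(f"certificate expired {delta.days} days before the logged attempt")
# -> certificate expired 154 days before the logged attempt
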
event="NodeHasNoDiskPressure" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.651578 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.651603 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.651619 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:29Z","lastTransitionTime":"2026-01-26T10:44:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:29 crc kubenswrapper[5003]: E0126 10:44:29.663446 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:29Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.667047 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.667086 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.667096 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.667109 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.667118 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:29Z","lastTransitionTime":"2026-01-26T10:44:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:29 crc kubenswrapper[5003]: E0126 10:44:29.678610 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:29Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.682505 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.682562 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.682579 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.682603 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.682620 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:29Z","lastTransitionTime":"2026-01-26T10:44:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:29 crc kubenswrapper[5003]: E0126 10:44:29.699660 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6e521106-5ceb-4879-a461-45cda76aa109\\\",\\\"systemUUID\\\":\\\"587875d7-ac1e-443a-baca-4a26e90f0b87\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:29Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:29 crc kubenswrapper[5003]: E0126 10:44:29.699989 5003 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.702135 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
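
The failed patch attempts above form one bounded retry loop: the kubelet tries the node-status patch a fixed number of times per sync (nodeStatusUpdateRetry, 5 in upstream kubelet sources; stated here as background, not read from this log) and then emits "update node status exceeds retry count". Every attempt dies at the same admission step: the patch must pass the node.network-node-identity.openshift.io validating webhook, whose serving certificate expired on 2025-08-24 while the node clock reads 2026-01-26, so TLS verification can never succeed. A minimal sketch for confirming that from the node itself, assuming the third-party cryptography package is available and using the 127.0.0.1:9743 endpoint named in the error:

    # cert_probe.py -- hypothetical check, not part of the cluster: fetch the
    # webhook's serving certificate and compare its validity window to the
    # node clock. Assumes the third-party 'cryptography' package (>= 42 for
    # the *_utc accessors) and that it runs on the node itself.
    import socket
    import ssl
    from datetime import datetime, timezone

    from cryptography import x509

    HOST, PORT = "127.0.0.1", 9743  # webhook endpoint from the error above

    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    ctx.check_hostname = False       # we only want to inspect the certificate,
    ctx.verify_mode = ssl.CERT_NONE  # so skip the verification the kubelet does

    with socket.create_connection((HOST, PORT), timeout=5) as sock:
        with ctx.wrap_socket(sock, server_hostname=HOST) as tls:
            der = tls.getpeercert(binary_form=True)

    cert = x509.load_der_x509_certificate(der)
    now = datetime.now(timezone.utc)
    print("notBefore:", cert.not_valid_before_utc)
    print("notAfter: ", cert.not_valid_after_utc)  # 2025-08-24T17:21:41Z per the log
    print("expired:  ", now > cert.not_valid_after_utc)

Until that certificate is rotated, every admission-gated write from this kubelet fails the same way, which is why the identical x509 message repeats on each attempt.
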
event="NodeHasSufficientMemory" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.702179 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.702193 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.702212 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.702224 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:29Z","lastTransitionTime":"2026-01-26T10:44:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.804741 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.804786 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.804798 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.804814 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.804826 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:29Z","lastTransitionTime":"2026-01-26T10:44:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.908182 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.908234 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.908243 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.908260 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:29 crc kubenswrapper[5003]: I0126 10:44:29.908269 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:29Z","lastTransitionTime":"2026-01-26T10:44:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.001437 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.001551 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.001437 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:44:30 crc kubenswrapper[5003]: E0126 10:44:30.001709 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:44:30 crc kubenswrapper[5003]: E0126 10:44:30.001843 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.001852 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 19:21:15.328739883 +0000 UTC Jan 26 10:44:30 crc kubenswrapper[5003]: E0126 10:44:30.002020 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.011243 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.011342 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.011367 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.011396 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.011420 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:30Z","lastTransitionTime":"2026-01-26T10:44:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.113629 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.113671 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.113681 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.113697 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.113707 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:30Z","lastTransitionTime":"2026-01-26T10:44:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.216328 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.216410 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.216436 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.216467 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.216491 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:30Z","lastTransitionTime":"2026-01-26T10:44:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.319243 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.319348 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.319369 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.319408 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.319434 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:30Z","lastTransitionTime":"2026-01-26T10:44:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.422353 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.422383 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.422392 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.422404 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.422412 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:30Z","lastTransitionTime":"2026-01-26T10:44:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.524824 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.524858 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.524870 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.524886 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.524897 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:30Z","lastTransitionTime":"2026-01-26T10:44:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.627825 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.627867 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.627900 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.627917 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.627928 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:30Z","lastTransitionTime":"2026-01-26T10:44:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.730538 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.730608 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.730635 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.730663 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.730687 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:30Z","lastTransitionTime":"2026-01-26T10:44:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.834543 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.834594 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.834606 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.834628 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.834639 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:30Z","lastTransitionTime":"2026-01-26T10:44:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.937162 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.937210 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.937222 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.937238 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:30 crc kubenswrapper[5003]: I0126 10:44:30.937263 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:30Z","lastTransitionTime":"2026-01-26T10:44:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.001129 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:44:31 crc kubenswrapper[5003]: E0126 10:44:31.001418 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.002802 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 12:53:25.29385757 +0000 UTC Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.039588 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.039622 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.039654 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.039671 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.039680 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:31Z","lastTransitionTime":"2026-01-26T10:44:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.141909 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.141962 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.141980 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.142001 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.142019 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:31Z","lastTransitionTime":"2026-01-26T10:44:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.244709 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.244759 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.244775 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.244797 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.244814 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:31Z","lastTransitionTime":"2026-01-26T10:44:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.347195 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.347234 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.347243 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.347257 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.347266 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:31Z","lastTransitionTime":"2026-01-26T10:44:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.450167 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.450220 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.450232 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.450250 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.450265 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:31Z","lastTransitionTime":"2026-01-26T10:44:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.553556 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.553611 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.553621 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.553641 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.553654 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:31Z","lastTransitionTime":"2026-01-26T10:44:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.655532 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.655574 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.655585 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.655601 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.655614 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:31Z","lastTransitionTime":"2026-01-26T10:44:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.757578 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.757612 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.757622 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.757634 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.757644 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:31Z","lastTransitionTime":"2026-01-26T10:44:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.860922 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.860978 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.860994 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.861026 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.861044 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:31Z","lastTransitionTime":"2026-01-26T10:44:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.963700 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.963756 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.963769 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.963786 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:31 crc kubenswrapper[5003]: I0126 10:44:31.963797 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:31Z","lastTransitionTime":"2026-01-26T10:44:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.000733 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.000793 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.000880 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:44:32 crc kubenswrapper[5003]: E0126 10:44:32.001043 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:44:32 crc kubenswrapper[5003]: E0126 10:44:32.001227 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:44:32 crc kubenswrapper[5003]: E0126 10:44:32.001267 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.002919 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 14:13:54.579802193 +0000 UTC Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.066692 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.066738 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.066752 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.066767 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.066779 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:32Z","lastTransitionTime":"2026-01-26T10:44:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.168880 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.168910 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.168922 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.168938 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.168950 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:32Z","lastTransitionTime":"2026-01-26T10:44:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.271607 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.271661 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.271676 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.271693 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.271706 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:32Z","lastTransitionTime":"2026-01-26T10:44:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.374843 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.374894 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.374909 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.374930 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.374945 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:32Z","lastTransitionTime":"2026-01-26T10:44:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.478685 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.478744 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.478758 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.478774 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.478785 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:32Z","lastTransitionTime":"2026-01-26T10:44:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.581915 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.581959 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.581973 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.581987 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.581998 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:32Z","lastTransitionTime":"2026-01-26T10:44:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.684755 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.684802 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.684812 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.684829 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.684839 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:32Z","lastTransitionTime":"2026-01-26T10:44:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.787675 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.787733 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.787745 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.787761 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.787773 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:32Z","lastTransitionTime":"2026-01-26T10:44:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.889553 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.889582 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.889589 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.889604 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.889612 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:32Z","lastTransitionTime":"2026-01-26T10:44:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.992173 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.992230 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.992246 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.992266 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:32 crc kubenswrapper[5003]: I0126 10:44:32.992303 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:32Z","lastTransitionTime":"2026-01-26T10:44:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.001697 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq"
Jan 26 10:44:33 crc kubenswrapper[5003]: E0126 10:44:33.001835 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.003542 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 13:26:40.730325052 +0000 UTC
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.095027 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.095093 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.095102 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.095118 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.095128 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:33Z","lastTransitionTime":"2026-01-26T10:44:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.197689 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.197745 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.197754 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.197784 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.197795 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:33Z","lastTransitionTime":"2026-01-26T10:44:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.300524 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.300575 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.300585 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.300600 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.300608 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:33Z","lastTransitionTime":"2026-01-26T10:44:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.403237 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.403316 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.403326 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.403338 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.403348 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:33Z","lastTransitionTime":"2026-01-26T10:44:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.505856 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.505899 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.505910 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.505925 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.505935 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:33Z","lastTransitionTime":"2026-01-26T10:44:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.608083 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.608118 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.608128 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.608145 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.608156 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:33Z","lastTransitionTime":"2026-01-26T10:44:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.710695 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.710745 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.710756 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.710771 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.710781 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:33Z","lastTransitionTime":"2026-01-26T10:44:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.813494 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.813559 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.813571 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.813588 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.813600 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:33Z","lastTransitionTime":"2026-01-26T10:44:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.916136 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.916190 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.916202 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.916218 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 10:44:33 crc kubenswrapper[5003]: I0126 10:44:33.916230 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:33Z","lastTransitionTime":"2026-01-26T10:44:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.001537 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.001795 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 26 10:44:34 crc kubenswrapper[5003]: E0126 10:44:34.001958 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.002258 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 26 10:44:34 crc kubenswrapper[5003]: E0126 10:44:34.002452 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 26 10:44:34 crc kubenswrapper[5003]: E0126 10:44:34.002644 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.004107 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 23:32:49.327266452 +0000 UTC Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.019797 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.019855 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.019868 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.019890 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.019931 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:34Z","lastTransitionTime":"2026-01-26T10:44:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.123077 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.123155 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.123171 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.123196 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.123212 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:34Z","lastTransitionTime":"2026-01-26T10:44:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.225596 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.225659 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.225670 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.225686 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.225717 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:34Z","lastTransitionTime":"2026-01-26T10:44:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.328606 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.328638 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.328647 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.328660 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.328669 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:34Z","lastTransitionTime":"2026-01-26T10:44:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.432392 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.432444 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.432457 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.432488 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.432504 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:34Z","lastTransitionTime":"2026-01-26T10:44:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.534960 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.535499 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.535576 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.535650 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.535726 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:34Z","lastTransitionTime":"2026-01-26T10:44:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.639264 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.639347 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.639360 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.639379 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.639389 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:34Z","lastTransitionTime":"2026-01-26T10:44:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.741751 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.741787 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.741798 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.741810 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.741819 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:34Z","lastTransitionTime":"2026-01-26T10:44:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.844310 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.844368 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.844379 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.844394 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.844404 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:34Z","lastTransitionTime":"2026-01-26T10:44:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.948042 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.948081 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.948094 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.948164 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:34 crc kubenswrapper[5003]: I0126 10:44:34.948180 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:34Z","lastTransitionTime":"2026-01-26T10:44:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.000917 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:44:35 crc kubenswrapper[5003]: E0126 10:44:35.001090 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.004272 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 01:49:06.63579509 +0000 UTC Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.016349 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de168e2a-6762-4792-8f48-4c754032f74a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-s
yncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.028136 5003 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50efe42dea1890b42091d0a7eaa0f62327c1087c053c38256e26256090e4fec2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://034125f8c9b16a029ce5c9544a9cb4b8d8747a427a032264e700c23e49b06f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.037576 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eac8a62-4b57-4423-b3b6-5d62047182f3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e29a3746f285efd57f60fafc32c5842c9b6509264e0c3385c5e1b5600231f047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98b5af77054fb64a506432cbe6801e93888383a6288532432b7217b7b76c5cea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdjlm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s4f8s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:35Z is after 2025-08-24T17:21:41Z" Jan 26 
10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.046979 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4jrnq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h5z9l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4jrnq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.050684 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.050721 5003 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.050732 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.050749 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.050764 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:35Z","lastTransitionTime":"2026-01-26T10:44:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.058510 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1334a476-a994-4656-b652-2725aca85019\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6579c2bbc7975da1ae81aead20d3e67324f66ab20f3fe0fcb129a5b639f6e483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://958f3050b1d86a17729120448765345d94145d597efd3ec7703101a688920f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2756e375728e6a3e7f4470e3049e06d529c5768c232a15cd7f5f7ba81d0c8e4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.069071 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef6c99048be2d0cfc98ef20b8559ceaf44491ffebbc5a8f363117a1c7a0b3c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.081653 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.095058 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.108832 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.118997 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xfz4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c23d8cc-d7a8-4d19-b9cc-a54ed2bf0b05\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81a673ac45c8bce5e09aa400e29966d2fa127d94716fbcbb4601193bd912e66d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j74r8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xfz4f\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.140816 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa5a15f1-a856-4287-9620-27c812d2b558\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://752a0510d41c818e6d7907270c71b87bc36855ff23286dd577922d6e276c2eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77952c64fd82e7aa02e52f6a8a0377e52d5617f5978538e76e80f10840aeca8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4290336686a8288215c61180ba8167ba15a03673d768d9555c49d56d05a523e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4f1a4543ab9ad1381c2334d23c6703f21c1021ff919fc81da5844f623484d54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3586db926574a734a2886c8c72feb8e243200000cb6e57d9c05a6e940e1f0325\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://104b5544f562a3f571c76299ea636c881232944b273db82554b2b0e9600c65e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95986f23d110793fbe3e626cfebc938359d8e8a7d4c567827dfaf94f298057f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://264873b760934ce1ad3534905308e8129c1730e1a4b720772e9f842ad6e9241a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.153392 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdcbe5cdd8cad54583ce64bd129d9565440fda9bc12039d3e58f8fd642be5d95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.154447 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.154577 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.154591 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.154609 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.155101 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:35Z","lastTransitionTime":"2026-01-26T10:44:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.165196 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x7zz5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6bd79f7d-ecd0-490f-af95-e995c1f3c052\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://214e1766c616a200e22f7c6433ad6c9676c0bff3a3f86578a714063f0010e4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6t4tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x7zz5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.174005 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25cce3e50f3c035fb087bab97863a57a95a61a49b28da218404f6b223d6b67b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gmzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m84kp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.187947 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a045c900-bcf1-42b6-894a-10c38739cf92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f74daac1b5987191a8aec42f28dc59180aaa2033b31d0c4341b0938e473fe2f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbf9b2ae28502da127e3b7a37be7a9251c712f200f83d4c538ef0a28ac8d109\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ee1939de5d44fded8bf566f06f8dab4cf063dd4f4d9764b56dbb8d237070eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c743564e23b4bdba592ff34a9b718145b600c2f742ccac2c701db535bc80514b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cae592ad79d44e738e41748a5214e72ddc016b0477c37633e0984456a670d083\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://203bee66589e6858e66aa18a3e00c4cd0bff32e3eeffa8e0b86669bf6c34a35c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eed15ec9192e210fa73534a7950570183802999d817fb359f7975dcd6006d85b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-77fgj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x9nkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.200091 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2a5d08-c449-45c6-8e1f-340c076422db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:44:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://385aa2f7573bd1359a2745d96b174f9df281bd6c5d661a0dfb2e77084cfa011f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:44:12Z\\\",\\\"message\\\":\\\"2026-01-26T10:43:26+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_07bfdeb5-2f91-4361-a75a-4b768c1ada25\\\\n2026-01-26T10:43:27+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_07bfdeb5-2f91-4361-a75a-4b768c1ada25 to /host/opt/cni/bin/\\\\n2026-01-26T10:43:27Z [verbose] multus-daemon started\\\\n2026-01-26T10:43:27Z [verbose] Readiness Indicator file check\\\\n2026-01-26T10:44:12Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:44:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kk74\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.218373 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T10:44:22Z\\\",\\\"message\\\":\\\"erator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.183\\\\\\\", Port:9001, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0126 10:44:22.293447 7055 services_controller.go:452] Built service openshift-machine-config-operator/machine-config-operator per-node LB for network=default: []services.LB{}\\\\nI0126 10:44:22.293456 7055 services_controller.go:453] Built service openshift-machine-config-operator/machine-config-operator template LB for network=default: []services.LB{}\\\\nI0126 10:44:22.293464 7055 services_controller.go:454] Service openshift-machine-config-operator/machine-config-operator for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nI0126 10:44:22.293443 7055 services_controller.go:451] Built service openshift-kube-storage-version-migrator-operator/metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-storage-version-migrator-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T10:44:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-q24zl_openshift-ovn-kubernetes(f9a98683-f9ac-45d4-9312-43ebf25bdb52)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xp458\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q24zl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.229039 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5fbe8df-fa5b-47cb-ae08-30e7d8de3a8d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a3c9c6bc10414e0ee33459ed2d373ebd6153bc455c01fb47ca394b17695acdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d7272571d0c28d942b9bd1fad1759a627cc5e39e2b7e
e804863f097855e697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7d7272571d0c28d942b9bd1fad1759a627cc5e39e2b7ee804863f097855e697\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.240453 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2977ac6-0892-467f-b265-16c14d9c63a7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d73b095e2001015587e06ebb41c1460ef946ddc2dd47f2c893ee82fe6353370b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://485f8dd99739834234edd418f9fc2fa8a50c854bf08a088fafb8ac6814dca1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/
ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2a532ad0f92d0c2284fcd0c52cac0b5187dacaccccf2fc361fab4173436c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T10:43:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c0f56e5d83ce7fcbde9c0503292ed1f242b2c1f535b471974ca3662ba6933e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c0f56e5d83ce7fcbde9c0503292ed1f242b2c1f535b471974ca3662ba6933e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T10:43:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T10:43:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T10:43:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T10:44:35Z is after 2025-08-24T17:21:41Z" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.257429 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.257464 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.257476 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.257493 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 
10:44:35.257504 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:35Z","lastTransitionTime":"2026-01-26T10:44:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.360410 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.360451 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.360461 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.360478 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.360489 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:35Z","lastTransitionTime":"2026-01-26T10:44:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.462316 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.462396 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.462433 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.462464 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.462486 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:35Z","lastTransitionTime":"2026-01-26T10:44:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.564639 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.564713 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.564741 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.564774 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.564794 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:35Z","lastTransitionTime":"2026-01-26T10:44:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.667278 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.667383 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.667405 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.667434 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.667456 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:35Z","lastTransitionTime":"2026-01-26T10:44:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.769985 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.770022 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.770033 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.770048 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.770057 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:35Z","lastTransitionTime":"2026-01-26T10:44:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.872613 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.872662 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.872674 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.872691 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.872705 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:35Z","lastTransitionTime":"2026-01-26T10:44:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.975669 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.975730 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.975746 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.975766 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:35 crc kubenswrapper[5003]: I0126 10:44:35.975781 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:35Z","lastTransitionTime":"2026-01-26T10:44:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.001568 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.001608 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.001582 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:44:36 crc kubenswrapper[5003]: E0126 10:44:36.001693 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:44:36 crc kubenswrapper[5003]: E0126 10:44:36.001773 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:44:36 crc kubenswrapper[5003]: E0126 10:44:36.001897 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.004643 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 09:33:08.600009978 +0000 UTC Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.078099 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.078172 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.078186 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.078201 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.078213 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:36Z","lastTransitionTime":"2026-01-26T10:44:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.179955 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.179994 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.180004 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.180018 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.180031 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:36Z","lastTransitionTime":"2026-01-26T10:44:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.282367 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.282407 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.282419 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.282434 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.282444 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:36Z","lastTransitionTime":"2026-01-26T10:44:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.385239 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.385325 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.385339 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.385355 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.385368 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:36Z","lastTransitionTime":"2026-01-26T10:44:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.488232 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.488337 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.488350 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.488367 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.488379 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:36Z","lastTransitionTime":"2026-01-26T10:44:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.591506 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.591597 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.591615 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.591636 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.591653 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:36Z","lastTransitionTime":"2026-01-26T10:44:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.693827 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.693862 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.693871 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.693884 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.693895 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:36Z","lastTransitionTime":"2026-01-26T10:44:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.796378 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.796418 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.796427 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.796441 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.796451 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:36Z","lastTransitionTime":"2026-01-26T10:44:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.899262 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.899346 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.899358 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.899396 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:36 crc kubenswrapper[5003]: I0126 10:44:36.899409 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:36Z","lastTransitionTime":"2026-01-26T10:44:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.000885 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:44:37 crc kubenswrapper[5003]: E0126 10:44:37.001524 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.002867 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.002906 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.002915 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.002930 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.002939 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:37Z","lastTransitionTime":"2026-01-26T10:44:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.005040 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 00:31:05.58255587 +0000 UTC Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.105245 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.105308 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.105324 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.105343 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.105357 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:37Z","lastTransitionTime":"2026-01-26T10:44:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.207695 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.208206 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.208242 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.208268 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.208311 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:37Z","lastTransitionTime":"2026-01-26T10:44:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.310518 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.310649 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.310668 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.310684 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.310723 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:37Z","lastTransitionTime":"2026-01-26T10:44:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.414205 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.414249 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.414259 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.414294 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.414307 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:37Z","lastTransitionTime":"2026-01-26T10:44:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.517244 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.517304 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.517314 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.517330 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.517344 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:37Z","lastTransitionTime":"2026-01-26T10:44:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.620472 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.620521 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.620532 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.620554 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.620567 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:37Z","lastTransitionTime":"2026-01-26T10:44:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.723171 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.723214 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.723223 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.723238 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.723249 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:37Z","lastTransitionTime":"2026-01-26T10:44:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.825495 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.825533 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.825541 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.825556 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.825564 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:37Z","lastTransitionTime":"2026-01-26T10:44:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.928016 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.928053 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.928062 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.928074 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:37 crc kubenswrapper[5003]: I0126 10:44:37.928083 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:37Z","lastTransitionTime":"2026-01-26T10:44:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.001133 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.001226 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.001514 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:44:38 crc kubenswrapper[5003]: E0126 10:44:38.001616 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:44:38 crc kubenswrapper[5003]: E0126 10:44:38.001684 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:44:38 crc kubenswrapper[5003]: E0126 10:44:38.002240 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.003185 5003 scope.go:117] "RemoveContainer" containerID="2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9" Jan 26 10:44:38 crc kubenswrapper[5003]: E0126 10:44:38.003489 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-q24zl_openshift-ovn-kubernetes(f9a98683-f9ac-45d4-9312-43ebf25bdb52)\"" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.005502 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 22:27:29.438255826 +0000 UTC Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.030642 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.030679 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.030688 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.030703 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.030713 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:38Z","lastTransitionTime":"2026-01-26T10:44:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.133965 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.134026 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.134039 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.134059 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.134072 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:38Z","lastTransitionTime":"2026-01-26T10:44:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.236580 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.236619 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.236629 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.236648 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.236660 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:38Z","lastTransitionTime":"2026-01-26T10:44:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.339225 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.339312 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.339331 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.339353 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.339368 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:38Z","lastTransitionTime":"2026-01-26T10:44:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.441310 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.441361 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.441377 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.441397 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.441415 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:38Z","lastTransitionTime":"2026-01-26T10:44:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.544086 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.544146 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.544158 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.544173 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.544182 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:38Z","lastTransitionTime":"2026-01-26T10:44:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.647023 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.647069 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.647078 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.647094 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.647103 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:38Z","lastTransitionTime":"2026-01-26T10:44:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.749439 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.749479 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.749490 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.749504 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.749515 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:38Z","lastTransitionTime":"2026-01-26T10:44:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.852205 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.852316 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.852342 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.852371 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.852398 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:38Z","lastTransitionTime":"2026-01-26T10:44:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.954995 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.955039 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.955051 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.955066 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:38 crc kubenswrapper[5003]: I0126 10:44:38.955077 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:38Z","lastTransitionTime":"2026-01-26T10:44:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.001474 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:44:39 crc kubenswrapper[5003]: E0126 10:44:39.001594 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.006170 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 10:30:47.744994438 +0000 UTC Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.060068 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.060113 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.060216 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.060236 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.060245 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:39Z","lastTransitionTime":"2026-01-26T10:44:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.163640 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.163685 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.163699 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.163716 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.163728 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:39Z","lastTransitionTime":"2026-01-26T10:44:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.267444 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.267507 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.267524 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.267546 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.267561 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:39Z","lastTransitionTime":"2026-01-26T10:44:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.369979 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.370227 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.370365 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.370467 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.370551 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:39Z","lastTransitionTime":"2026-01-26T10:44:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.472889 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.473182 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.473344 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.473513 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.473617 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:39Z","lastTransitionTime":"2026-01-26T10:44:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.576176 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.576215 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.576225 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.576239 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.576248 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:39Z","lastTransitionTime":"2026-01-26T10:44:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.680317 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.680391 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.680411 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.680438 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.680455 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:39Z","lastTransitionTime":"2026-01-26T10:44:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.760548 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.760658 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.760684 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.760718 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.760741 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T10:44:39Z","lastTransitionTime":"2026-01-26T10:44:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.804140 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-bprk7"] Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.804736 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bprk7" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.806726 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.806789 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.806832 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.807022 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.824309 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=77.824261896 podStartE2EDuration="1m17.824261896s" podCreationTimestamp="2026-01-26 10:43:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:44:39.823835704 +0000 UTC m=+95.365061275" watchObservedRunningTime="2026-01-26 10:44:39.824261896 +0000 UTC m=+95.365487467" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.850200 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=75.850185365 podStartE2EDuration="1m15.850185365s" podCreationTimestamp="2026-01-26 10:43:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:44:39.836063404 +0000 UTC m=+95.377288965" watchObservedRunningTime="2026-01-26 10:44:39.850185365 +0000 UTC m=+95.391410936" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.876319 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s4f8s" podStartSLOduration=74.87630339 podStartE2EDuration="1m14.87630339s" podCreationTimestamp="2026-01-26 10:43:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:44:39.875843978 +0000 UTC m=+95.417069539" watchObservedRunningTime="2026-01-26 10:44:39.87630339 +0000 UTC m=+95.417528951" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.897386 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/af05daee-92ca-47c3-b43a-fbc333e2993a-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-bprk7\" (UID: \"af05daee-92ca-47c3-b43a-fbc333e2993a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bprk7" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.897437 5003 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/af05daee-92ca-47c3-b43a-fbc333e2993a-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-bprk7\" (UID: \"af05daee-92ca-47c3-b43a-fbc333e2993a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bprk7" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.897477 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/af05daee-92ca-47c3-b43a-fbc333e2993a-service-ca\") pod \"cluster-version-operator-5c965bbfc6-bprk7\" (UID: \"af05daee-92ca-47c3-b43a-fbc333e2993a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bprk7" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.897500 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/af05daee-92ca-47c3-b43a-fbc333e2993a-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-bprk7\" (UID: \"af05daee-92ca-47c3-b43a-fbc333e2993a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bprk7" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.897531 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/af05daee-92ca-47c3-b43a-fbc333e2993a-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-bprk7\" (UID: \"af05daee-92ca-47c3-b43a-fbc333e2993a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bprk7" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.933096 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=77.933073642 podStartE2EDuration="1m17.933073642s" podCreationTimestamp="2026-01-26 10:43:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:44:39.917128441 +0000 UTC m=+95.458354012" watchObservedRunningTime="2026-01-26 10:44:39.933073642 +0000 UTC m=+95.474299203" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.992048 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-xfz4f" podStartSLOduration=75.992009312 podStartE2EDuration="1m15.992009312s" podCreationTimestamp="2026-01-26 10:43:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:44:39.991679633 +0000 UTC m=+95.532905194" watchObservedRunningTime="2026-01-26 10:44:39.992009312 +0000 UTC m=+95.533234873" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.998565 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/af05daee-92ca-47c3-b43a-fbc333e2993a-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-bprk7\" (UID: \"af05daee-92ca-47c3-b43a-fbc333e2993a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bprk7" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.998621 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/af05daee-92ca-47c3-b43a-fbc333e2993a-service-ca\") pod \"cluster-version-operator-5c965bbfc6-bprk7\" (UID: 
\"af05daee-92ca-47c3-b43a-fbc333e2993a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bprk7" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.998643 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/af05daee-92ca-47c3-b43a-fbc333e2993a-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-bprk7\" (UID: \"af05daee-92ca-47c3-b43a-fbc333e2993a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bprk7" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.998678 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/af05daee-92ca-47c3-b43a-fbc333e2993a-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-bprk7\" (UID: \"af05daee-92ca-47c3-b43a-fbc333e2993a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bprk7" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.998718 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/af05daee-92ca-47c3-b43a-fbc333e2993a-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-bprk7\" (UID: \"af05daee-92ca-47c3-b43a-fbc333e2993a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bprk7" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.998791 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/af05daee-92ca-47c3-b43a-fbc333e2993a-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-bprk7\" (UID: \"af05daee-92ca-47c3-b43a-fbc333e2993a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bprk7" Jan 26 10:44:39 crc kubenswrapper[5003]: I0126 10:44:39.998928 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/af05daee-92ca-47c3-b43a-fbc333e2993a-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-bprk7\" (UID: \"af05daee-92ca-47c3-b43a-fbc333e2993a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bprk7" Jan 26 10:44:40 crc kubenswrapper[5003]: I0126 10:44:40.000163 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/af05daee-92ca-47c3-b43a-fbc333e2993a-service-ca\") pod \"cluster-version-operator-5c965bbfc6-bprk7\" (UID: \"af05daee-92ca-47c3-b43a-fbc333e2993a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bprk7" Jan 26 10:44:40 crc kubenswrapper[5003]: I0126 10:44:40.000691 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:44:40 crc kubenswrapper[5003]: I0126 10:44:40.000736 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:44:40 crc kubenswrapper[5003]: E0126 10:44:40.000792 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:44:40 crc kubenswrapper[5003]: E0126 10:44:40.000883 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:44:40 crc kubenswrapper[5003]: I0126 10:44:40.000955 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:44:40 crc kubenswrapper[5003]: E0126 10:44:40.001013 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:44:40 crc kubenswrapper[5003]: I0126 10:44:40.004649 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/af05daee-92ca-47c3-b43a-fbc333e2993a-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-bprk7\" (UID: \"af05daee-92ca-47c3-b43a-fbc333e2993a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bprk7" Jan 26 10:44:40 crc kubenswrapper[5003]: I0126 10:44:40.007139 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-10 17:18:12.873943316 +0000 UTC Jan 26 10:44:40 crc kubenswrapper[5003]: I0126 10:44:40.007214 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Rotating certificates Jan 26 10:44:40 crc kubenswrapper[5003]: I0126 10:44:40.018874 5003 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Jan 26 10:44:40 crc kubenswrapper[5003]: I0126 10:44:40.023302 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/af05daee-92ca-47c3-b43a-fbc333e2993a-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-bprk7\" (UID: \"af05daee-92ca-47c3-b43a-fbc333e2993a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bprk7" Jan 26 10:44:40 crc kubenswrapper[5003]: I0126 10:44:40.064335 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=43.064317973 podStartE2EDuration="43.064317973s" podCreationTimestamp="2026-01-26 10:43:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:44:40.062803682 +0000 UTC m=+95.604029243" watchObservedRunningTime="2026-01-26 10:44:40.064317973 +0000 UTC m=+95.605543534" Jan 26 10:44:40 crc kubenswrapper[5003]: I0126 10:44:40.064442 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=22.064438016 
podStartE2EDuration="22.064438016s" podCreationTimestamp="2026-01-26 10:44:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:44:40.050641584 +0000 UTC m=+95.591867135" watchObservedRunningTime="2026-01-26 10:44:40.064438016 +0000 UTC m=+95.605663577" Jan 26 10:44:40 crc kubenswrapper[5003]: I0126 10:44:40.093545 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-x7zz5" podStartSLOduration=76.093525641 podStartE2EDuration="1m16.093525641s" podCreationTimestamp="2026-01-26 10:43:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:44:40.076644605 +0000 UTC m=+95.617870176" watchObservedRunningTime="2026-01-26 10:44:40.093525641 +0000 UTC m=+95.634751202" Jan 26 10:44:40 crc kubenswrapper[5003]: I0126 10:44:40.109879 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podStartSLOduration=76.109861011 podStartE2EDuration="1m16.109861011s" podCreationTimestamp="2026-01-26 10:43:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:44:40.094500927 +0000 UTC m=+95.635726488" watchObservedRunningTime="2026-01-26 10:44:40.109861011 +0000 UTC m=+95.651086572" Jan 26 10:44:40 crc kubenswrapper[5003]: I0126 10:44:40.119635 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bprk7" Jan 26 10:44:40 crc kubenswrapper[5003]: I0126 10:44:40.132722 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-x9nkb" podStartSLOduration=76.132701868 podStartE2EDuration="1m16.132701868s" podCreationTimestamp="2026-01-26 10:43:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:44:40.110215171 +0000 UTC m=+95.651440742" watchObservedRunningTime="2026-01-26 10:44:40.132701868 +0000 UTC m=+95.673927429" Jan 26 10:44:40 crc kubenswrapper[5003]: I0126 10:44:40.135864 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-vpb6l" podStartSLOduration=76.135688488 podStartE2EDuration="1m16.135688488s" podCreationTimestamp="2026-01-26 10:43:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:44:40.132087711 +0000 UTC m=+95.673313282" watchObservedRunningTime="2026-01-26 10:44:40.135688488 +0000 UTC m=+95.676914049" Jan 26 10:44:40 crc kubenswrapper[5003]: W0126 10:44:40.138756 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf05daee_92ca_47c3_b43a_fbc333e2993a.slice/crio-d3e02b072b12f53dae991a487a6f3c392efade6185bcce3faef7ad5fe83f311d WatchSource:0}: Error finding container d3e02b072b12f53dae991a487a6f3c392efade6185bcce3faef7ad5fe83f311d: Status 404 returned error can't find the container with id d3e02b072b12f53dae991a487a6f3c392efade6185bcce3faef7ad5fe83f311d Jan 26 10:44:40 crc kubenswrapper[5003]: I0126 10:44:40.468014 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bprk7" event={"ID":"af05daee-92ca-47c3-b43a-fbc333e2993a","Type":"ContainerStarted","Data":"9bf88888e15075f112d59ee7e3b5b3572d640fa57ef5db3c8e35a5d70d0c3d67"} Jan 26 10:44:40 crc kubenswrapper[5003]: I0126 10:44:40.468344 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bprk7" event={"ID":"af05daee-92ca-47c3-b43a-fbc333e2993a","Type":"ContainerStarted","Data":"d3e02b072b12f53dae991a487a6f3c392efade6185bcce3faef7ad5fe83f311d"} Jan 26 10:44:40 crc kubenswrapper[5003]: I0126 10:44:40.484424 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bprk7" podStartSLOduration=76.484405196 podStartE2EDuration="1m16.484405196s" podCreationTimestamp="2026-01-26 10:43:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:44:40.483852381 +0000 UTC m=+96.025077982" watchObservedRunningTime="2026-01-26 10:44:40.484405196 +0000 UTC m=+96.025630757" Jan 26 10:44:41 crc kubenswrapper[5003]: I0126 10:44:41.001172 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:44:41 crc kubenswrapper[5003]: E0126 10:44:41.001388 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:44:42 crc kubenswrapper[5003]: I0126 10:44:42.001568 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:44:42 crc kubenswrapper[5003]: E0126 10:44:42.001707 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:44:42 crc kubenswrapper[5003]: I0126 10:44:42.001586 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:44:42 crc kubenswrapper[5003]: I0126 10:44:42.001564 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:44:42 crc kubenswrapper[5003]: E0126 10:44:42.001799 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:44:42 crc kubenswrapper[5003]: E0126 10:44:42.001974 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:44:43 crc kubenswrapper[5003]: I0126 10:44:43.001458 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:44:43 crc kubenswrapper[5003]: E0126 10:44:43.001699 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:44:43 crc kubenswrapper[5003]: I0126 10:44:43.233881 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/aa06185d-fe5e-423a-b5a7-19e8bb7c8a60-metrics-certs\") pod \"network-metrics-daemon-4jrnq\" (UID: \"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60\") " pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:44:43 crc kubenswrapper[5003]: E0126 10:44:43.234090 5003 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 10:44:43 crc kubenswrapper[5003]: E0126 10:44:43.234221 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/aa06185d-fe5e-423a-b5a7-19e8bb7c8a60-metrics-certs podName:aa06185d-fe5e-423a-b5a7-19e8bb7c8a60 nodeName:}" failed. No retries permitted until 2026-01-26 10:45:47.234190004 +0000 UTC m=+162.775415595 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/aa06185d-fe5e-423a-b5a7-19e8bb7c8a60-metrics-certs") pod "network-metrics-daemon-4jrnq" (UID: "aa06185d-fe5e-423a-b5a7-19e8bb7c8a60") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 10:44:44 crc kubenswrapper[5003]: I0126 10:44:44.001073 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:44:44 crc kubenswrapper[5003]: I0126 10:44:44.001133 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:44:44 crc kubenswrapper[5003]: I0126 10:44:44.001077 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:44:44 crc kubenswrapper[5003]: E0126 10:44:44.001218 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:44:44 crc kubenswrapper[5003]: E0126 10:44:44.001347 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:44:44 crc kubenswrapper[5003]: E0126 10:44:44.001417 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:44:45 crc kubenswrapper[5003]: I0126 10:44:45.001272 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:44:45 crc kubenswrapper[5003]: E0126 10:44:45.010197 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:44:46 crc kubenswrapper[5003]: I0126 10:44:46.001021 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:44:46 crc kubenswrapper[5003]: I0126 10:44:46.001047 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:44:46 crc kubenswrapper[5003]: I0126 10:44:46.001033 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:44:46 crc kubenswrapper[5003]: E0126 10:44:46.001150 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:44:46 crc kubenswrapper[5003]: E0126 10:44:46.001302 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:44:46 crc kubenswrapper[5003]: E0126 10:44:46.001380 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:44:47 crc kubenswrapper[5003]: I0126 10:44:47.001544 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:44:47 crc kubenswrapper[5003]: E0126 10:44:47.001703 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:44:48 crc kubenswrapper[5003]: I0126 10:44:48.000869 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:44:48 crc kubenswrapper[5003]: I0126 10:44:48.000995 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:44:48 crc kubenswrapper[5003]: E0126 10:44:48.001001 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:44:48 crc kubenswrapper[5003]: I0126 10:44:48.001095 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:44:48 crc kubenswrapper[5003]: E0126 10:44:48.001103 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:44:48 crc kubenswrapper[5003]: E0126 10:44:48.001386 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:44:49 crc kubenswrapper[5003]: I0126 10:44:49.000759 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:44:49 crc kubenswrapper[5003]: E0126 10:44:49.000924 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:44:50 crc kubenswrapper[5003]: I0126 10:44:50.001748 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:44:50 crc kubenswrapper[5003]: I0126 10:44:50.001813 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:44:50 crc kubenswrapper[5003]: I0126 10:44:50.001867 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:44:50 crc kubenswrapper[5003]: E0126 10:44:50.001955 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:44:50 crc kubenswrapper[5003]: E0126 10:44:50.002038 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:44:50 crc kubenswrapper[5003]: E0126 10:44:50.002556 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:44:50 crc kubenswrapper[5003]: I0126 10:44:50.002855 5003 scope.go:117] "RemoveContainer" containerID="2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9" Jan 26 10:44:50 crc kubenswrapper[5003]: E0126 10:44:50.003060 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-q24zl_openshift-ovn-kubernetes(f9a98683-f9ac-45d4-9312-43ebf25bdb52)\"" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" Jan 26 10:44:51 crc kubenswrapper[5003]: I0126 10:44:51.000794 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:44:51 crc kubenswrapper[5003]: E0126 10:44:51.000940 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:44:52 crc kubenswrapper[5003]: I0126 10:44:52.001402 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:44:52 crc kubenswrapper[5003]: I0126 10:44:52.001428 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:44:52 crc kubenswrapper[5003]: E0126 10:44:52.001524 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:44:52 crc kubenswrapper[5003]: I0126 10:44:52.001410 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:44:52 crc kubenswrapper[5003]: E0126 10:44:52.001644 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:44:52 crc kubenswrapper[5003]: E0126 10:44:52.001704 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:44:53 crc kubenswrapper[5003]: I0126 10:44:53.000772 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:44:53 crc kubenswrapper[5003]: E0126 10:44:53.000994 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:44:54 crc kubenswrapper[5003]: I0126 10:44:54.001213 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:44:54 crc kubenswrapper[5003]: I0126 10:44:54.001227 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:44:54 crc kubenswrapper[5003]: I0126 10:44:54.001403 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:44:54 crc kubenswrapper[5003]: E0126 10:44:54.001518 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:44:54 crc kubenswrapper[5003]: E0126 10:44:54.001652 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:44:54 crc kubenswrapper[5003]: E0126 10:44:54.001762 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:44:55 crc kubenswrapper[5003]: I0126 10:44:55.001664 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:44:55 crc kubenswrapper[5003]: E0126 10:44:55.002697 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:44:56 crc kubenswrapper[5003]: I0126 10:44:56.001159 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:44:56 crc kubenswrapper[5003]: I0126 10:44:56.001164 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:44:56 crc kubenswrapper[5003]: I0126 10:44:56.001144 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:44:56 crc kubenswrapper[5003]: E0126 10:44:56.001340 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:44:56 crc kubenswrapper[5003]: E0126 10:44:56.001858 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:44:56 crc kubenswrapper[5003]: E0126 10:44:56.002091 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:44:57 crc kubenswrapper[5003]: I0126 10:44:57.005388 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:44:57 crc kubenswrapper[5003]: E0126 10:44:57.005570 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:44:58 crc kubenswrapper[5003]: I0126 10:44:58.001859 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:44:58 crc kubenswrapper[5003]: I0126 10:44:58.001870 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:44:58 crc kubenswrapper[5003]: E0126 10:44:58.002061 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:44:58 crc kubenswrapper[5003]: I0126 10:44:58.002123 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:44:58 crc kubenswrapper[5003]: E0126 10:44:58.002167 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:44:58 crc kubenswrapper[5003]: E0126 10:44:58.002550 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:44:59 crc kubenswrapper[5003]: I0126 10:44:59.001569 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:44:59 crc kubenswrapper[5003]: E0126 10:44:59.001755 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:44:59 crc kubenswrapper[5003]: I0126 10:44:59.529694 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vpb6l_9a2a5d08-c449-45c6-8e1f-340c076422db/kube-multus/1.log" Jan 26 10:44:59 crc kubenswrapper[5003]: I0126 10:44:59.530482 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vpb6l_9a2a5d08-c449-45c6-8e1f-340c076422db/kube-multus/0.log" Jan 26 10:44:59 crc kubenswrapper[5003]: I0126 10:44:59.530556 5003 generic.go:334] "Generic (PLEG): container finished" podID="9a2a5d08-c449-45c6-8e1f-340c076422db" containerID="385aa2f7573bd1359a2745d96b174f9df281bd6c5d661a0dfb2e77084cfa011f" exitCode=1 Jan 26 10:44:59 crc kubenswrapper[5003]: I0126 10:44:59.530606 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vpb6l" event={"ID":"9a2a5d08-c449-45c6-8e1f-340c076422db","Type":"ContainerDied","Data":"385aa2f7573bd1359a2745d96b174f9df281bd6c5d661a0dfb2e77084cfa011f"} Jan 26 10:44:59 crc kubenswrapper[5003]: I0126 10:44:59.530665 5003 scope.go:117] "RemoveContainer" containerID="432555d30db3e21cb14481bac7620cae5d6e6b33c71fddfa2891d0b9cb6d01a4" Jan 26 10:44:59 crc kubenswrapper[5003]: I0126 10:44:59.531664 5003 scope.go:117] "RemoveContainer" containerID="385aa2f7573bd1359a2745d96b174f9df281bd6c5d661a0dfb2e77084cfa011f" Jan 26 10:44:59 crc kubenswrapper[5003]: E0126 10:44:59.532352 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-vpb6l_openshift-multus(9a2a5d08-c449-45c6-8e1f-340c076422db)\"" pod="openshift-multus/multus-vpb6l" podUID="9a2a5d08-c449-45c6-8e1f-340c076422db" Jan 26 10:45:00 crc kubenswrapper[5003]: I0126 10:45:00.000987 5003 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:45:00 crc kubenswrapper[5003]: I0126 10:45:00.001069 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:45:00 crc kubenswrapper[5003]: E0126 10:45:00.001127 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:45:00 crc kubenswrapper[5003]: I0126 10:45:00.001186 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:45:00 crc kubenswrapper[5003]: E0126 10:45:00.001375 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:45:00 crc kubenswrapper[5003]: E0126 10:45:00.001489 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:45:00 crc kubenswrapper[5003]: I0126 10:45:00.538016 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vpb6l_9a2a5d08-c449-45c6-8e1f-340c076422db/kube-multus/1.log" Jan 26 10:45:01 crc kubenswrapper[5003]: I0126 10:45:01.000680 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:45:01 crc kubenswrapper[5003]: E0126 10:45:01.000950 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:45:01 crc kubenswrapper[5003]: I0126 10:45:01.002789 5003 scope.go:117] "RemoveContainer" containerID="2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9" Jan 26 10:45:01 crc kubenswrapper[5003]: E0126 10:45:01.003386 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-q24zl_openshift-ovn-kubernetes(f9a98683-f9ac-45d4-9312-43ebf25bdb52)\"" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" Jan 26 10:45:02 crc kubenswrapper[5003]: I0126 10:45:02.001091 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:45:02 crc kubenswrapper[5003]: I0126 10:45:02.001129 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:45:02 crc kubenswrapper[5003]: E0126 10:45:02.001228 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:45:02 crc kubenswrapper[5003]: I0126 10:45:02.001254 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:45:02 crc kubenswrapper[5003]: E0126 10:45:02.001616 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:45:02 crc kubenswrapper[5003]: E0126 10:45:02.001699 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:45:03 crc kubenswrapper[5003]: I0126 10:45:03.001932 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:45:03 crc kubenswrapper[5003]: E0126 10:45:03.002278 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:45:04 crc kubenswrapper[5003]: I0126 10:45:04.001637 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:45:04 crc kubenswrapper[5003]: E0126 10:45:04.001795 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:45:04 crc kubenswrapper[5003]: I0126 10:45:04.002036 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:45:04 crc kubenswrapper[5003]: E0126 10:45:04.002132 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:45:04 crc kubenswrapper[5003]: I0126 10:45:04.002174 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:45:04 crc kubenswrapper[5003]: E0126 10:45:04.002340 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:45:05 crc kubenswrapper[5003]: I0126 10:45:05.000952 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:45:05 crc kubenswrapper[5003]: E0126 10:45:05.003505 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:45:05 crc kubenswrapper[5003]: E0126 10:45:05.003576 5003 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Jan 26 10:45:05 crc kubenswrapper[5003]: E0126 10:45:05.086698 5003 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 26 10:45:06 crc kubenswrapper[5003]: I0126 10:45:06.001098 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:45:06 crc kubenswrapper[5003]: I0126 10:45:06.001174 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:45:06 crc kubenswrapper[5003]: E0126 10:45:06.001318 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:45:06 crc kubenswrapper[5003]: E0126 10:45:06.001445 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:45:06 crc kubenswrapper[5003]: I0126 10:45:06.001536 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:45:06 crc kubenswrapper[5003]: E0126 10:45:06.001670 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:45:07 crc kubenswrapper[5003]: I0126 10:45:07.001496 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:45:07 crc kubenswrapper[5003]: E0126 10:45:07.002569 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:45:08 crc kubenswrapper[5003]: I0126 10:45:08.000912 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:45:08 crc kubenswrapper[5003]: I0126 10:45:08.001001 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:45:08 crc kubenswrapper[5003]: E0126 10:45:08.001038 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:45:08 crc kubenswrapper[5003]: I0126 10:45:08.001092 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:45:08 crc kubenswrapper[5003]: E0126 10:45:08.001192 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:45:08 crc kubenswrapper[5003]: E0126 10:45:08.001254 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:45:09 crc kubenswrapper[5003]: I0126 10:45:09.001632 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:45:09 crc kubenswrapper[5003]: E0126 10:45:09.001823 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:45:10 crc kubenswrapper[5003]: I0126 10:45:10.001415 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:45:10 crc kubenswrapper[5003]: I0126 10:45:10.001486 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:45:10 crc kubenswrapper[5003]: I0126 10:45:10.001423 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:45:10 crc kubenswrapper[5003]: E0126 10:45:10.001586 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:45:10 crc kubenswrapper[5003]: E0126 10:45:10.001722 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:45:10 crc kubenswrapper[5003]: E0126 10:45:10.001870 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:45:10 crc kubenswrapper[5003]: E0126 10:45:10.088844 5003 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 26 10:45:11 crc kubenswrapper[5003]: I0126 10:45:11.001069 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:45:11 crc kubenswrapper[5003]: E0126 10:45:11.001270 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:45:12 crc kubenswrapper[5003]: I0126 10:45:12.001469 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:45:12 crc kubenswrapper[5003]: I0126 10:45:12.001468 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:45:12 crc kubenswrapper[5003]: I0126 10:45:12.001636 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:45:12 crc kubenswrapper[5003]: E0126 10:45:12.001825 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:45:12 crc kubenswrapper[5003]: E0126 10:45:12.001906 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:45:12 crc kubenswrapper[5003]: I0126 10:45:12.001936 5003 scope.go:117] "RemoveContainer" containerID="385aa2f7573bd1359a2745d96b174f9df281bd6c5d661a0dfb2e77084cfa011f" Jan 26 10:45:12 crc kubenswrapper[5003]: E0126 10:45:12.001971 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:45:12 crc kubenswrapper[5003]: I0126 10:45:12.582374 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vpb6l_9a2a5d08-c449-45c6-8e1f-340c076422db/kube-multus/1.log" Jan 26 10:45:12 crc kubenswrapper[5003]: I0126 10:45:12.582425 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vpb6l" event={"ID":"9a2a5d08-c449-45c6-8e1f-340c076422db","Type":"ContainerStarted","Data":"cd218687710b6fabb66404835f025c68b5ee6af1e63c65283186b8190108f4bb"} Jan 26 10:45:13 crc kubenswrapper[5003]: I0126 10:45:13.000735 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:45:13 crc kubenswrapper[5003]: E0126 10:45:13.000894 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:45:14 crc kubenswrapper[5003]: I0126 10:45:14.001307 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:45:14 crc kubenswrapper[5003]: I0126 10:45:14.001270 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:45:14 crc kubenswrapper[5003]: E0126 10:45:14.001435 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:45:14 crc kubenswrapper[5003]: E0126 10:45:14.001518 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:45:14 crc kubenswrapper[5003]: I0126 10:45:14.001326 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:45:14 crc kubenswrapper[5003]: E0126 10:45:14.001637 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:45:15 crc kubenswrapper[5003]: I0126 10:45:15.001216 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:45:15 crc kubenswrapper[5003]: E0126 10:45:15.002089 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:45:15 crc kubenswrapper[5003]: E0126 10:45:15.089572 5003 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 26 10:45:16 crc kubenswrapper[5003]: I0126 10:45:16.001048 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:45:16 crc kubenswrapper[5003]: I0126 10:45:16.001202 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:45:16 crc kubenswrapper[5003]: I0126 10:45:16.001336 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:45:16 crc kubenswrapper[5003]: E0126 10:45:16.001550 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:45:16 crc kubenswrapper[5003]: E0126 10:45:16.002268 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:45:16 crc kubenswrapper[5003]: E0126 10:45:16.002365 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:45:16 crc kubenswrapper[5003]: I0126 10:45:16.002500 5003 scope.go:117] "RemoveContainer" containerID="2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9" Jan 26 10:45:16 crc kubenswrapper[5003]: I0126 10:45:16.595258 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q24zl_f9a98683-f9ac-45d4-9312-43ebf25bdb52/ovnkube-controller/3.log" Jan 26 10:45:16 crc kubenswrapper[5003]: I0126 10:45:16.597495 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" event={"ID":"f9a98683-f9ac-45d4-9312-43ebf25bdb52","Type":"ContainerStarted","Data":"6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489"} Jan 26 10:45:16 crc kubenswrapper[5003]: I0126 10:45:16.597853 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:45:16 crc kubenswrapper[5003]: I0126 10:45:16.625390 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" podStartSLOduration=112.62537191 podStartE2EDuration="1m52.62537191s" podCreationTimestamp="2026-01-26 10:43:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:16.624881946 +0000 UTC m=+132.166107507" watchObservedRunningTime="2026-01-26 10:45:16.62537191 +0000 UTC m=+132.166597471" Jan 26 10:45:16 crc kubenswrapper[5003]: I0126 10:45:16.809522 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-4jrnq"] Jan 26 10:45:16 crc kubenswrapper[5003]: I0126 10:45:16.809650 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:45:16 crc kubenswrapper[5003]: E0126 10:45:16.809748 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:45:18 crc kubenswrapper[5003]: I0126 10:45:18.000749 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:45:18 crc kubenswrapper[5003]: I0126 10:45:18.000809 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:45:18 crc kubenswrapper[5003]: I0126 10:45:18.000831 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:45:18 crc kubenswrapper[5003]: E0126 10:45:18.000968 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:45:18 crc kubenswrapper[5003]: E0126 10:45:18.001072 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:45:18 crc kubenswrapper[5003]: E0126 10:45:18.001223 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:45:19 crc kubenswrapper[5003]: I0126 10:45:19.001476 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:45:19 crc kubenswrapper[5003]: E0126 10:45:19.001716 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4jrnq" podUID="aa06185d-fe5e-423a-b5a7-19e8bb7c8a60" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.001639 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.001731 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.001769 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:45:20 crc kubenswrapper[5003]: E0126 10:45:20.001862 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 10:45:20 crc kubenswrapper[5003]: E0126 10:45:20.002109 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 10:45:20 crc kubenswrapper[5003]: E0126 10:45:20.002362 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.867997 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.919540 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-jjlwz"] Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.920160 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.921448 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6w5n8"] Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.922322 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6w5n8" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.922770 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.922977 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-sl8wp"] Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.923202 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.923248 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.923447 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-sl8wp" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.924159 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.924197 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.924542 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-7j782"] Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.924665 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.925257 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-7j782" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.926300 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.926544 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.926640 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.926725 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.926817 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.926832 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.926891 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.926989 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.927067 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99"] Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.927759 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.927078 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.928091 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.928131 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.929427 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.929783 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.930253 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.930450 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.930660 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.930896 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.931054 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.932677 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-22wlq"] Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.933023 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.933507 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-s2rcp"] Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.934041 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.934257 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.934725 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.934941 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-p8x6r"] Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.935623 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.935645 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.935895 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-p8x6r" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.935943 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.935664 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vdrzc"] Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.936256 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.936535 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2rcp" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.936747 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cn5sg"] Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.937175 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-cpxlv"] Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.937264 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vdrzc" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.937913 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-cpxlv" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.941468 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.941690 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.942043 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.942420 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.942682 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.945372 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-9d9xc"] Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.946243 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-9d9xc" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.956709 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cn5sg" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.958762 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.966894 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-v5nfq"] Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.969007 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.969133 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-v5nfq" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.969345 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.969939 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.972251 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-kqsg8"] Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.970132 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.970201 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.970309 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.970435 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.970647 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.970846 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.971110 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.971458 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.971443 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.971537 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.971620 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 26 10:45:20 crc 
kubenswrapper[5003]: I0126 10:45:20.971659 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.971819 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.971836 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.971981 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.972068 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.972136 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.972169 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.972179 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.972201 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.972275 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.972310 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.972396 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.972418 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.972474 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.972515 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.972583 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.972751 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.972880 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.991512 5003 reflector.go:368] Caches populated 
for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.994570 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-s9544"] Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.995001 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-rctvj"] Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.996101 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.996403 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.996545 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.996736 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.996884 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.997120 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.997442 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.997570 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.998767 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 26 10:45:20 crc kubenswrapper[5003]: I0126 10:45:20.998942 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.000326 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6sl62"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.000762 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6sl62" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.000822 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-rctvj" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.000883 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-s9544" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.000995 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-kqsg8" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.001658 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.003411 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.007164 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.015716 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.016112 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.016231 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.016390 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.016459 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.016661 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.016750 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.016880 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.016968 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.018006 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hbddz"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.018552 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hbddz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.021329 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.022445 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-t55b4"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.023233 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t55b4" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.026970 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.033485 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.033719 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.033825 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.034145 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.034261 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.034196 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-dtlw6"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.034472 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.034519 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.034626 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.034168 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.043733 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.043834 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtlw6" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.046495 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sls4l"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.047778 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sls4l" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.050581 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.057119 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.057258 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-glvh2\" (UniqueName: \"kubernetes.io/projected/16dd94f1-87e0-4fbd-910e-af2ece0fd525-kube-api-access-glvh2\") pod \"apiserver-7bbb656c7d-2rn99\" (UID: \"16dd94f1-87e0-4fbd-910e-af2ece0fd525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.057300 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f149d971-e11c-471d-91a2-a8e5ed472e41-config\") pod \"machine-api-operator-5694c8668f-7j782\" (UID: \"f149d971-e11c-471d-91a2-a8e5ed472e41\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7j782" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.057326 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xcbh\" (UniqueName: \"kubernetes.io/projected/35326ae8-7aba-468e-abf7-aab37519fc34-kube-api-access-2xcbh\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.057350 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/16dd94f1-87e0-4fbd-910e-af2ece0fd525-encryption-config\") pod \"apiserver-7bbb656c7d-2rn99\" (UID: \"16dd94f1-87e0-4fbd-910e-af2ece0fd525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.057369 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/35326ae8-7aba-468e-abf7-aab37519fc34-etcd-client\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.057388 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4v8f\" (UniqueName: \"kubernetes.io/projected/6e70d335-0c77-41ee-a1a5-f0d4b7d28bea-kube-api-access-m4v8f\") pod \"controller-manager-879f6c89f-sl8wp\" (UID: \"6e70d335-0c77-41ee-a1a5-f0d4b7d28bea\") " pod="openshift-controller-manager/controller-manager-879f6c89f-sl8wp" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.057432 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" 
(UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.057457 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/16dd94f1-87e0-4fbd-910e-af2ece0fd525-audit-policies\") pod \"apiserver-7bbb656c7d-2rn99\" (UID: \"16dd94f1-87e0-4fbd-910e-af2ece0fd525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.057484 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dp7gz\" (UniqueName: \"kubernetes.io/projected/96cf2336-9c93-48fe-8d61-c9618714c1b2-kube-api-access-dp7gz\") pod \"route-controller-manager-6576b87f9c-6w5n8\" (UID: \"96cf2336-9c93-48fe-8d61-c9618714c1b2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6w5n8" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.057505 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7b2d0b83-b35f-4128-af91-623a6871a431-audit-policies\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.057524 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.057550 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pkqqk\" (UniqueName: \"kubernetes.io/projected/89d62a5e-a294-4eff-b004-1f62339b8f6e-kube-api-access-pkqqk\") pod \"kube-storage-version-migrator-operator-b67b599dd-cn5sg\" (UID: \"89d62a5e-a294-4eff-b004-1f62339b8f6e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cn5sg" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.057574 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/35326ae8-7aba-468e-abf7-aab37519fc34-serving-cert\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.057609 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e70d335-0c77-41ee-a1a5-f0d4b7d28bea-config\") pod \"controller-manager-879f6c89f-sl8wp\" (UID: \"6e70d335-0c77-41ee-a1a5-f0d4b7d28bea\") " pod="openshift-controller-manager/controller-manager-879f6c89f-sl8wp" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.057630 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/35326ae8-7aba-468e-abf7-aab37519fc34-node-pullsecrets\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.057654 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.057680 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/16dd94f1-87e0-4fbd-910e-af2ece0fd525-etcd-client\") pod \"apiserver-7bbb656c7d-2rn99\" (UID: \"16dd94f1-87e0-4fbd-910e-af2ece0fd525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.057698 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/16dd94f1-87e0-4fbd-910e-af2ece0fd525-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-2rn99\" (UID: \"16dd94f1-87e0-4fbd-910e-af2ece0fd525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.057720 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6b8433bf-b4a0-4f65-9691-5da935026105-serving-cert\") pod \"authentication-operator-69f744f599-v5nfq\" (UID: \"6b8433bf-b4a0-4f65-9691-5da935026105\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-v5nfq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.057809 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6gzr\" (UniqueName: \"kubernetes.io/projected/518875f1-cfde-4528-b15f-369cd105dc65-kube-api-access-c6gzr\") pod \"machine-approver-56656f9798-s2rcp\" (UID: \"518875f1-cfde-4528-b15f-369cd105dc65\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2rcp" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.058904 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6e70d335-0c77-41ee-a1a5-f0d4b7d28bea-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-sl8wp\" (UID: \"6e70d335-0c77-41ee-a1a5-f0d4b7d28bea\") " pod="openshift-controller-manager/controller-manager-879f6c89f-sl8wp" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.058951 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.058986 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/96cf2336-9c93-48fe-8d61-c9618714c1b2-config\") pod \"route-controller-manager-6576b87f9c-6w5n8\" (UID: \"96cf2336-9c93-48fe-8d61-c9618714c1b2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6w5n8" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.059031 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62pf7\" (UniqueName: \"kubernetes.io/projected/b43bbdc1-9062-4460-85e7-4de472e0fd06-kube-api-access-62pf7\") pod \"openshift-config-operator-7777fb866f-9d9xc\" (UID: \"b43bbdc1-9062-4460-85e7-4de472e0fd06\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-9d9xc" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.059112 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/35326ae8-7aba-468e-abf7-aab37519fc34-image-import-ca\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.059147 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/35326ae8-7aba-468e-abf7-aab37519fc34-etcd-serving-ca\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.059184 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/35326ae8-7aba-468e-abf7-aab37519fc34-encryption-config\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.059246 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.059318 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/16dd94f1-87e0-4fbd-910e-af2ece0fd525-audit-dir\") pod \"apiserver-7bbb656c7d-2rn99\" (UID: \"16dd94f1-87e0-4fbd-910e-af2ece0fd525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.059389 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7b2d0b83-b35f-4128-af91-623a6871a431-audit-dir\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.059439 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/89d62a5e-a294-4eff-b004-1f62339b8f6e-serving-cert\") pod 
\"kube-storage-version-migrator-operator-b67b599dd-cn5sg\" (UID: \"89d62a5e-a294-4eff-b004-1f62339b8f6e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cn5sg" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.059517 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/96cf2336-9c93-48fe-8d61-c9618714c1b2-client-ca\") pod \"route-controller-manager-6576b87f9c-6w5n8\" (UID: \"96cf2336-9c93-48fe-8d61-c9618714c1b2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6w5n8" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.059554 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a8192c61-0b99-47a2-af6c-aee9eff089f1-oauth-serving-cert\") pod \"console-f9d7485db-cpxlv\" (UID: \"a8192c61-0b99-47a2-af6c-aee9eff089f1\") " pod="openshift-console/console-f9d7485db-cpxlv" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.059577 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82664343-0cff-4d22-8287-786cc0ce857c-config\") pod \"openshift-apiserver-operator-796bbdcf4f-p8x6r\" (UID: \"82664343-0cff-4d22-8287-786cc0ce857c\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-p8x6r" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.059613 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35326ae8-7aba-468e-abf7-aab37519fc34-config\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.059666 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/f149d971-e11c-471d-91a2-a8e5ed472e41-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-7j782\" (UID: \"f149d971-e11c-471d-91a2-a8e5ed472e41\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7j782" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.059695 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a8192c61-0b99-47a2-af6c-aee9eff089f1-console-config\") pod \"console-f9d7485db-cpxlv\" (UID: \"a8192c61-0b99-47a2-af6c-aee9eff089f1\") " pod="openshift-console/console-f9d7485db-cpxlv" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.059724 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6b8433bf-b4a0-4f65-9691-5da935026105-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-v5nfq\" (UID: \"6b8433bf-b4a0-4f65-9691-5da935026105\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-v5nfq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.059762 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.059797 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a8192c61-0b99-47a2-af6c-aee9eff089f1-trusted-ca-bundle\") pod \"console-f9d7485db-cpxlv\" (UID: \"a8192c61-0b99-47a2-af6c-aee9eff089f1\") " pod="openshift-console/console-f9d7485db-cpxlv" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.059848 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba581404-064d-46b7-a930-ab65f4d61a80-config\") pod \"kube-apiserver-operator-766d6c64bb-vdrzc\" (UID: \"ba581404-064d-46b7-a930-ab65f4d61a80\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vdrzc" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.059892 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6e70d335-0c77-41ee-a1a5-f0d4b7d28bea-serving-cert\") pod \"controller-manager-879f6c89f-sl8wp\" (UID: \"6e70d335-0c77-41ee-a1a5-f0d4b7d28bea\") " pod="openshift-controller-manager/controller-manager-879f6c89f-sl8wp" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.059977 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6e70d335-0c77-41ee-a1a5-f0d4b7d28bea-client-ca\") pod \"controller-manager-879f6c89f-sl8wp\" (UID: \"6e70d335-0c77-41ee-a1a5-f0d4b7d28bea\") " pod="openshift-controller-manager/controller-manager-879f6c89f-sl8wp" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.060011 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/89d62a5e-a294-4eff-b004-1f62339b8f6e-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-cn5sg\" (UID: \"89d62a5e-a294-4eff-b004-1f62339b8f6e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cn5sg" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.060042 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ba581404-064d-46b7-a930-ab65f4d61a80-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-vdrzc\" (UID: \"ba581404-064d-46b7-a930-ab65f4d61a80\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vdrzc" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.060076 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dv726\" (UniqueName: \"kubernetes.io/projected/f149d971-e11c-471d-91a2-a8e5ed472e41-kube-api-access-dv726\") pod \"machine-api-operator-5694c8668f-7j782\" (UID: \"f149d971-e11c-471d-91a2-a8e5ed472e41\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7j782" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.060100 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/35326ae8-7aba-468e-abf7-aab37519fc34-audit-dir\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.060160 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/518875f1-cfde-4528-b15f-369cd105dc65-config\") pod \"machine-approver-56656f9798-s2rcp\" (UID: \"518875f1-cfde-4528-b15f-369cd105dc65\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2rcp" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.064455 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b43bbdc1-9062-4460-85e7-4de472e0fd06-serving-cert\") pod \"openshift-config-operator-7777fb866f-9d9xc\" (UID: \"b43bbdc1-9062-4460-85e7-4de472e0fd06\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-9d9xc" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.064505 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82664343-0cff-4d22-8287-786cc0ce857c-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-p8x6r\" (UID: \"82664343-0cff-4d22-8287-786cc0ce857c\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-p8x6r" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.064551 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/35326ae8-7aba-468e-abf7-aab37519fc34-audit\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.064593 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.064619 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.064649 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5dfvd\" (UniqueName: \"kubernetes.io/projected/6b8433bf-b4a0-4f65-9691-5da935026105-kube-api-access-5dfvd\") pod \"authentication-operator-69f744f599-v5nfq\" (UID: \"6b8433bf-b4a0-4f65-9691-5da935026105\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-v5nfq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.064682 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8wf52"] Jan 26 10:45:21 crc 
kubenswrapper[5003]: I0126 10:45:21.064855 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.064905 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/518875f1-cfde-4528-b15f-369cd105dc65-auth-proxy-config\") pod \"machine-approver-56656f9798-s2rcp\" (UID: \"518875f1-cfde-4528-b15f-369cd105dc65\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2rcp" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.064989 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.065017 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/16dd94f1-87e0-4fbd-910e-af2ece0fd525-serving-cert\") pod \"apiserver-7bbb656c7d-2rn99\" (UID: \"16dd94f1-87e0-4fbd-910e-af2ece0fd525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.065040 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f149d971-e11c-471d-91a2-a8e5ed472e41-images\") pod \"machine-api-operator-5694c8668f-7j782\" (UID: \"f149d971-e11c-471d-91a2-a8e5ed472e41\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7j782" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.065064 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b8433bf-b4a0-4f65-9691-5da935026105-config\") pod \"authentication-operator-69f744f599-v5nfq\" (UID: \"6b8433bf-b4a0-4f65-9691-5da935026105\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-v5nfq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.065093 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vz2kl\" (UniqueName: \"kubernetes.io/projected/82664343-0cff-4d22-8287-786cc0ce857c-kube-api-access-vz2kl\") pod \"openshift-apiserver-operator-796bbdcf4f-p8x6r\" (UID: \"82664343-0cff-4d22-8287-786cc0ce857c\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-p8x6r" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.065244 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/16dd94f1-87e0-4fbd-910e-af2ece0fd525-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-2rn99\" (UID: \"16dd94f1-87e0-4fbd-910e-af2ece0fd525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" Jan 26 10:45:21 crc 
kubenswrapper[5003]: I0126 10:45:21.065361 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a8192c61-0b99-47a2-af6c-aee9eff089f1-console-serving-cert\") pod \"console-f9d7485db-cpxlv\" (UID: \"a8192c61-0b99-47a2-af6c-aee9eff089f1\") " pod="openshift-console/console-f9d7485db-cpxlv" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.065424 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6b8433bf-b4a0-4f65-9691-5da935026105-service-ca-bundle\") pod \"authentication-operator-69f744f599-v5nfq\" (UID: \"6b8433bf-b4a0-4f65-9691-5da935026105\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-v5nfq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.065471 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/35326ae8-7aba-468e-abf7-aab37519fc34-trusted-ca-bundle\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.065496 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlltl\" (UniqueName: \"kubernetes.io/projected/a8192c61-0b99-47a2-af6c-aee9eff089f1-kube-api-access-dlltl\") pod \"console-f9d7485db-cpxlv\" (UID: \"a8192c61-0b99-47a2-af6c-aee9eff089f1\") " pod="openshift-console/console-f9d7485db-cpxlv" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.065576 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/96cf2336-9c93-48fe-8d61-c9618714c1b2-serving-cert\") pod \"route-controller-manager-6576b87f9c-6w5n8\" (UID: \"96cf2336-9c93-48fe-8d61-c9618714c1b2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6w5n8" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.065614 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/b43bbdc1-9062-4460-85e7-4de472e0fd06-available-featuregates\") pod \"openshift-config-operator-7777fb866f-9d9xc\" (UID: \"b43bbdc1-9062-4460-85e7-4de472e0fd06\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-9d9xc" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.065658 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hh4t6\" (UniqueName: \"kubernetes.io/projected/7b2d0b83-b35f-4128-af91-623a6871a431-kube-api-access-hh4t6\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.065742 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/518875f1-cfde-4528-b15f-369cd105dc65-machine-approver-tls\") pod \"machine-approver-56656f9798-s2rcp\" (UID: \"518875f1-cfde-4528-b15f-369cd105dc65\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2rcp" Jan 26 10:45:21 crc kubenswrapper[5003]: 
I0126 10:45:21.065794 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ba581404-064d-46b7-a930-ab65f4d61a80-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-vdrzc\" (UID: \"ba581404-064d-46b7-a930-ab65f4d61a80\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vdrzc" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.065847 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a8192c61-0b99-47a2-af6c-aee9eff089f1-console-oauth-config\") pod \"console-f9d7485db-cpxlv\" (UID: \"a8192c61-0b99-47a2-af6c-aee9eff089f1\") " pod="openshift-console/console-f9d7485db-cpxlv" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.065878 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a8192c61-0b99-47a2-af6c-aee9eff089f1-service-ca\") pod \"console-f9d7485db-cpxlv\" (UID: \"a8192c61-0b99-47a2-af6c-aee9eff089f1\") " pod="openshift-console/console-f9d7485db-cpxlv" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.066319 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2kvjq"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.066723 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8wf52" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.067272 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.072888 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.073116 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-grvqx"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.077016 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-grvqx" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.103106 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5kwm8"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.103660 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4n9nm"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.104184 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-djnsq"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.104622 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-djnsq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.104854 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5kwm8" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.105000 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4n9nm" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.108808 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.109071 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.127944 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xsgg6"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.128667 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-54rgc"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.128933 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-xsgg6" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.129162 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-54rgc" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.136588 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.137550 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-zdzvq"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.141309 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zdzvq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.143003 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.150748 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xphsr"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.151717 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xphsr" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.156665 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rgpfr"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.157300 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rgpfr" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.157444 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-tlchc"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.164435 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.166319 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/35326ae8-7aba-468e-abf7-aab37519fc34-image-import-ca\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.166350 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/35326ae8-7aba-468e-abf7-aab37519fc34-encryption-config\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.166368 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/35326ae8-7aba-468e-abf7-aab37519fc34-etcd-serving-ca\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.166382 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/16dd94f1-87e0-4fbd-910e-af2ece0fd525-audit-dir\") pod \"apiserver-7bbb656c7d-2rn99\" (UID: \"16dd94f1-87e0-4fbd-910e-af2ece0fd525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.166399 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.166414 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7b2d0b83-b35f-4128-af91-623a6871a431-audit-dir\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.166428 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/89d62a5e-a294-4eff-b004-1f62339b8f6e-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-cn5sg\" (UID: \"89d62a5e-a294-4eff-b004-1f62339b8f6e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cn5sg" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.166455 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/96cf2336-9c93-48fe-8d61-c9618714c1b2-client-ca\") pod \"route-controller-manager-6576b87f9c-6w5n8\" (UID: \"96cf2336-9c93-48fe-8d61-c9618714c1b2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6w5n8" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.166474 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a8192c61-0b99-47a2-af6c-aee9eff089f1-oauth-serving-cert\") pod \"console-f9d7485db-cpxlv\" (UID: \"a8192c61-0b99-47a2-af6c-aee9eff089f1\") " pod="openshift-console/console-f9d7485db-cpxlv" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.166495 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82664343-0cff-4d22-8287-786cc0ce857c-config\") pod \"openshift-apiserver-operator-796bbdcf4f-p8x6r\" (UID: \"82664343-0cff-4d22-8287-786cc0ce857c\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-p8x6r" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.166518 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35326ae8-7aba-468e-abf7-aab37519fc34-config\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.166535 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/f149d971-e11c-471d-91a2-a8e5ed472e41-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-7j782\" (UID: \"f149d971-e11c-471d-91a2-a8e5ed472e41\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7j782" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.166550 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a8192c61-0b99-47a2-af6c-aee9eff089f1-console-config\") pod \"console-f9d7485db-cpxlv\" (UID: \"a8192c61-0b99-47a2-af6c-aee9eff089f1\") " pod="openshift-console/console-f9d7485db-cpxlv" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.166566 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6b8433bf-b4a0-4f65-9691-5da935026105-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-v5nfq\" (UID: \"6b8433bf-b4a0-4f65-9691-5da935026105\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-v5nfq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.166583 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.166599 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a8192c61-0b99-47a2-af6c-aee9eff089f1-trusted-ca-bundle\") pod \"console-f9d7485db-cpxlv\" (UID: \"a8192c61-0b99-47a2-af6c-aee9eff089f1\") " 
pod="openshift-console/console-f9d7485db-cpxlv" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.166615 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba581404-064d-46b7-a930-ab65f4d61a80-config\") pod \"kube-apiserver-operator-766d6c64bb-vdrzc\" (UID: \"ba581404-064d-46b7-a930-ab65f4d61a80\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vdrzc" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.166630 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6e70d335-0c77-41ee-a1a5-f0d4b7d28bea-serving-cert\") pod \"controller-manager-879f6c89f-sl8wp\" (UID: \"6e70d335-0c77-41ee-a1a5-f0d4b7d28bea\") " pod="openshift-controller-manager/controller-manager-879f6c89f-sl8wp" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.166635 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7b2d0b83-b35f-4128-af91-623a6871a431-audit-dir\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.166649 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6e70d335-0c77-41ee-a1a5-f0d4b7d28bea-client-ca\") pod \"controller-manager-879f6c89f-sl8wp\" (UID: \"6e70d335-0c77-41ee-a1a5-f0d4b7d28bea\") " pod="openshift-controller-manager/controller-manager-879f6c89f-sl8wp" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.166717 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/89d62a5e-a294-4eff-b004-1f62339b8f6e-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-cn5sg\" (UID: \"89d62a5e-a294-4eff-b004-1f62339b8f6e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cn5sg" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.166768 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ba581404-064d-46b7-a930-ab65f4d61a80-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-vdrzc\" (UID: \"ba581404-064d-46b7-a930-ab65f4d61a80\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vdrzc" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.166787 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dv726\" (UniqueName: \"kubernetes.io/projected/f149d971-e11c-471d-91a2-a8e5ed472e41-kube-api-access-dv726\") pod \"machine-api-operator-5694c8668f-7j782\" (UID: \"f149d971-e11c-471d-91a2-a8e5ed472e41\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7j782" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.166806 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/35326ae8-7aba-468e-abf7-aab37519fc34-audit-dir\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.166950 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/518875f1-cfde-4528-b15f-369cd105dc65-config\") pod \"machine-approver-56656f9798-s2rcp\" (UID: \"518875f1-cfde-4528-b15f-369cd105dc65\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2rcp" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.166971 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b43bbdc1-9062-4460-85e7-4de472e0fd06-serving-cert\") pod \"openshift-config-operator-7777fb866f-9d9xc\" (UID: \"b43bbdc1-9062-4460-85e7-4de472e0fd06\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-9d9xc" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167010 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82664343-0cff-4d22-8287-786cc0ce857c-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-p8x6r\" (UID: \"82664343-0cff-4d22-8287-786cc0ce857c\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-p8x6r" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167031 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/35326ae8-7aba-468e-abf7-aab37519fc34-audit\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167050 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167088 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5dfvd\" (UniqueName: \"kubernetes.io/projected/6b8433bf-b4a0-4f65-9691-5da935026105-kube-api-access-5dfvd\") pod \"authentication-operator-69f744f599-v5nfq\" (UID: \"6b8433bf-b4a0-4f65-9691-5da935026105\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-v5nfq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167109 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167143 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167182 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/518875f1-cfde-4528-b15f-369cd105dc65-auth-proxy-config\") pod 
\"machine-approver-56656f9798-s2rcp\" (UID: \"518875f1-cfde-4528-b15f-369cd105dc65\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2rcp" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167213 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167303 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/16dd94f1-87e0-4fbd-910e-af2ece0fd525-serving-cert\") pod \"apiserver-7bbb656c7d-2rn99\" (UID: \"16dd94f1-87e0-4fbd-910e-af2ece0fd525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167331 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f149d971-e11c-471d-91a2-a8e5ed472e41-images\") pod \"machine-api-operator-5694c8668f-7j782\" (UID: \"f149d971-e11c-471d-91a2-a8e5ed472e41\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7j782" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167338 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6e70d335-0c77-41ee-a1a5-f0d4b7d28bea-client-ca\") pod \"controller-manager-879f6c89f-sl8wp\" (UID: \"6e70d335-0c77-41ee-a1a5-f0d4b7d28bea\") " pod="openshift-controller-manager/controller-manager-879f6c89f-sl8wp" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167347 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b8433bf-b4a0-4f65-9691-5da935026105-config\") pod \"authentication-operator-69f744f599-v5nfq\" (UID: \"6b8433bf-b4a0-4f65-9691-5da935026105\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-v5nfq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167397 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vz2kl\" (UniqueName: \"kubernetes.io/projected/82664343-0cff-4d22-8287-786cc0ce857c-kube-api-access-vz2kl\") pod \"openshift-apiserver-operator-796bbdcf4f-p8x6r\" (UID: \"82664343-0cff-4d22-8287-786cc0ce857c\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-p8x6r" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167420 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/16dd94f1-87e0-4fbd-910e-af2ece0fd525-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-2rn99\" (UID: \"16dd94f1-87e0-4fbd-910e-af2ece0fd525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167438 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a8192c61-0b99-47a2-af6c-aee9eff089f1-console-serving-cert\") pod \"console-f9d7485db-cpxlv\" (UID: \"a8192c61-0b99-47a2-af6c-aee9eff089f1\") " pod="openshift-console/console-f9d7485db-cpxlv" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167455 5003 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/35326ae8-7aba-468e-abf7-aab37519fc34-trusted-ca-bundle\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167472 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dlltl\" (UniqueName: \"kubernetes.io/projected/a8192c61-0b99-47a2-af6c-aee9eff089f1-kube-api-access-dlltl\") pod \"console-f9d7485db-cpxlv\" (UID: \"a8192c61-0b99-47a2-af6c-aee9eff089f1\") " pod="openshift-console/console-f9d7485db-cpxlv" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167487 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6b8433bf-b4a0-4f65-9691-5da935026105-service-ca-bundle\") pod \"authentication-operator-69f744f599-v5nfq\" (UID: \"6b8433bf-b4a0-4f65-9691-5da935026105\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-v5nfq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167514 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/96cf2336-9c93-48fe-8d61-c9618714c1b2-serving-cert\") pod \"route-controller-manager-6576b87f9c-6w5n8\" (UID: \"96cf2336-9c93-48fe-8d61-c9618714c1b2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6w5n8" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167531 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/b43bbdc1-9062-4460-85e7-4de472e0fd06-available-featuregates\") pod \"openshift-config-operator-7777fb866f-9d9xc\" (UID: \"b43bbdc1-9062-4460-85e7-4de472e0fd06\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-9d9xc" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167548 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hh4t6\" (UniqueName: \"kubernetes.io/projected/7b2d0b83-b35f-4128-af91-623a6871a431-kube-api-access-hh4t6\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167566 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/518875f1-cfde-4528-b15f-369cd105dc65-machine-approver-tls\") pod \"machine-approver-56656f9798-s2rcp\" (UID: \"518875f1-cfde-4528-b15f-369cd105dc65\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2rcp" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167583 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ba581404-064d-46b7-a930-ab65f4d61a80-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-vdrzc\" (UID: \"ba581404-064d-46b7-a930-ab65f4d61a80\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vdrzc" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167600 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: 
\"kubernetes.io/configmap/a8192c61-0b99-47a2-af6c-aee9eff089f1-service-ca\") pod \"console-f9d7485db-cpxlv\" (UID: \"a8192c61-0b99-47a2-af6c-aee9eff089f1\") " pod="openshift-console/console-f9d7485db-cpxlv" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167616 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a8192c61-0b99-47a2-af6c-aee9eff089f1-console-oauth-config\") pod \"console-f9d7485db-cpxlv\" (UID: \"a8192c61-0b99-47a2-af6c-aee9eff089f1\") " pod="openshift-console/console-f9d7485db-cpxlv" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167633 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167650 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-glvh2\" (UniqueName: \"kubernetes.io/projected/16dd94f1-87e0-4fbd-910e-af2ece0fd525-kube-api-access-glvh2\") pod \"apiserver-7bbb656c7d-2rn99\" (UID: \"16dd94f1-87e0-4fbd-910e-af2ece0fd525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167667 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xcbh\" (UniqueName: \"kubernetes.io/projected/35326ae8-7aba-468e-abf7-aab37519fc34-kube-api-access-2xcbh\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167681 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/16dd94f1-87e0-4fbd-910e-af2ece0fd525-encryption-config\") pod \"apiserver-7bbb656c7d-2rn99\" (UID: \"16dd94f1-87e0-4fbd-910e-af2ece0fd525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167695 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f149d971-e11c-471d-91a2-a8e5ed472e41-config\") pod \"machine-api-operator-5694c8668f-7j782\" (UID: \"f149d971-e11c-471d-91a2-a8e5ed472e41\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7j782" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167711 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/35326ae8-7aba-468e-abf7-aab37519fc34-etcd-client\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167736 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4v8f\" (UniqueName: \"kubernetes.io/projected/6e70d335-0c77-41ee-a1a5-f0d4b7d28bea-kube-api-access-m4v8f\") pod \"controller-manager-879f6c89f-sl8wp\" (UID: \"6e70d335-0c77-41ee-a1a5-f0d4b7d28bea\") " pod="openshift-controller-manager/controller-manager-879f6c89f-sl8wp" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 
10:45:21.167752 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167769 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/16dd94f1-87e0-4fbd-910e-af2ece0fd525-audit-policies\") pod \"apiserver-7bbb656c7d-2rn99\" (UID: \"16dd94f1-87e0-4fbd-910e-af2ece0fd525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167783 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dp7gz\" (UniqueName: \"kubernetes.io/projected/96cf2336-9c93-48fe-8d61-c9618714c1b2-kube-api-access-dp7gz\") pod \"route-controller-manager-6576b87f9c-6w5n8\" (UID: \"96cf2336-9c93-48fe-8d61-c9618714c1b2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6w5n8" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167800 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167815 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pkqqk\" (UniqueName: \"kubernetes.io/projected/89d62a5e-a294-4eff-b004-1f62339b8f6e-kube-api-access-pkqqk\") pod \"kube-storage-version-migrator-operator-b67b599dd-cn5sg\" (UID: \"89d62a5e-a294-4eff-b004-1f62339b8f6e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cn5sg" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167832 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/35326ae8-7aba-468e-abf7-aab37519fc34-serving-cert\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167848 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7b2d0b83-b35f-4128-af91-623a6871a431-audit-policies\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167866 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82664343-0cff-4d22-8287-786cc0ce857c-config\") pod \"openshift-apiserver-operator-796bbdcf4f-p8x6r\" (UID: \"82664343-0cff-4d22-8287-786cc0ce857c\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-p8x6r" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167867 5003 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e70d335-0c77-41ee-a1a5-f0d4b7d28bea-config\") pod \"controller-manager-879f6c89f-sl8wp\" (UID: \"6e70d335-0c77-41ee-a1a5-f0d4b7d28bea\") " pod="openshift-controller-manager/controller-manager-879f6c89f-sl8wp" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167904 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/35326ae8-7aba-468e-abf7-aab37519fc34-node-pullsecrets\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167927 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167949 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/16dd94f1-87e0-4fbd-910e-af2ece0fd525-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-2rn99\" (UID: \"16dd94f1-87e0-4fbd-910e-af2ece0fd525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167968 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6b8433bf-b4a0-4f65-9691-5da935026105-serving-cert\") pod \"authentication-operator-69f744f599-v5nfq\" (UID: \"6b8433bf-b4a0-4f65-9691-5da935026105\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-v5nfq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.167986 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6gzr\" (UniqueName: \"kubernetes.io/projected/518875f1-cfde-4528-b15f-369cd105dc65-kube-api-access-c6gzr\") pod \"machine-approver-56656f9798-s2rcp\" (UID: \"518875f1-cfde-4528-b15f-369cd105dc65\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2rcp" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.168002 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/16dd94f1-87e0-4fbd-910e-af2ece0fd525-etcd-client\") pod \"apiserver-7bbb656c7d-2rn99\" (UID: \"16dd94f1-87e0-4fbd-910e-af2ece0fd525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.168020 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6e70d335-0c77-41ee-a1a5-f0d4b7d28bea-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-sl8wp\" (UID: \"6e70d335-0c77-41ee-a1a5-f0d4b7d28bea\") " pod="openshift-controller-manager/controller-manager-879f6c89f-sl8wp" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.168039 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-22wlq\" 
(UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.168040 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/35326ae8-7aba-468e-abf7-aab37519fc34-image-import-ca\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.168057 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96cf2336-9c93-48fe-8d61-c9618714c1b2-config\") pod \"route-controller-manager-6576b87f9c-6w5n8\" (UID: \"96cf2336-9c93-48fe-8d61-c9618714c1b2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6w5n8" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.168090 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62pf7\" (UniqueName: \"kubernetes.io/projected/b43bbdc1-9062-4460-85e7-4de472e0fd06-kube-api-access-62pf7\") pod \"openshift-config-operator-7777fb866f-9d9xc\" (UID: \"b43bbdc1-9062-4460-85e7-4de472e0fd06\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-9d9xc" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.169225 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/35326ae8-7aba-468e-abf7-aab37519fc34-etcd-serving-ca\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.169865 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96cf2336-9c93-48fe-8d61-c9618714c1b2-config\") pod \"route-controller-manager-6576b87f9c-6w5n8\" (UID: \"96cf2336-9c93-48fe-8d61-c9618714c1b2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6w5n8" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.169939 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6b8433bf-b4a0-4f65-9691-5da935026105-service-ca-bundle\") pod \"authentication-operator-69f744f599-v5nfq\" (UID: \"6b8433bf-b4a0-4f65-9691-5da935026105\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-v5nfq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.170006 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/35326ae8-7aba-468e-abf7-aab37519fc34-audit-dir\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.170032 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/89d62a5e-a294-4eff-b004-1f62339b8f6e-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-cn5sg\" (UID: \"89d62a5e-a294-4eff-b004-1f62339b8f6e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cn5sg" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.170209 5003 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/35326ae8-7aba-468e-abf7-aab37519fc34-node-pullsecrets\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.170350 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.170370 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35326ae8-7aba-468e-abf7-aab37519fc34-config\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.170538 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/518875f1-cfde-4528-b15f-369cd105dc65-config\") pod \"machine-approver-56656f9798-s2rcp\" (UID: \"518875f1-cfde-4528-b15f-369cd105dc65\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2rcp" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.170706 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e70d335-0c77-41ee-a1a5-f0d4b7d28bea-config\") pod \"controller-manager-879f6c89f-sl8wp\" (UID: \"6e70d335-0c77-41ee-a1a5-f0d4b7d28bea\") " pod="openshift-controller-manager/controller-manager-879f6c89f-sl8wp" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.171376 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6e70d335-0c77-41ee-a1a5-f0d4b7d28bea-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-sl8wp\" (UID: \"6e70d335-0c77-41ee-a1a5-f0d4b7d28bea\") " pod="openshift-controller-manager/controller-manager-879f6c89f-sl8wp" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.171384 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a8192c61-0b99-47a2-af6c-aee9eff089f1-trusted-ca-bundle\") pod \"console-f9d7485db-cpxlv\" (UID: \"a8192c61-0b99-47a2-af6c-aee9eff089f1\") " pod="openshift-console/console-f9d7485db-cpxlv" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.171895 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba581404-064d-46b7-a930-ab65f4d61a80-config\") pod \"kube-apiserver-operator-766d6c64bb-vdrzc\" (UID: \"ba581404-064d-46b7-a930-ab65f4d61a80\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vdrzc" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.173078 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-672x8"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.175071 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/16dd94f1-87e0-4fbd-910e-af2ece0fd525-audit-dir\") pod 
\"apiserver-7bbb656c7d-2rn99\" (UID: \"16dd94f1-87e0-4fbd-910e-af2ece0fd525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.175694 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a8192c61-0b99-47a2-af6c-aee9eff089f1-console-config\") pod \"console-f9d7485db-cpxlv\" (UID: \"a8192c61-0b99-47a2-af6c-aee9eff089f1\") " pod="openshift-console/console-f9d7485db-cpxlv" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.176624 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6b8433bf-b4a0-4f65-9691-5da935026105-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-v5nfq\" (UID: \"6b8433bf-b4a0-4f65-9691-5da935026105\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-v5nfq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.177983 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/16dd94f1-87e0-4fbd-910e-af2ece0fd525-audit-policies\") pod \"apiserver-7bbb656c7d-2rn99\" (UID: \"16dd94f1-87e0-4fbd-910e-af2ece0fd525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.178320 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b43bbdc1-9062-4460-85e7-4de472e0fd06-serving-cert\") pod \"openshift-config-operator-7777fb866f-9d9xc\" (UID: \"b43bbdc1-9062-4460-85e7-4de472e0fd06\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-9d9xc" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.178791 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/35326ae8-7aba-468e-abf7-aab37519fc34-audit\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.179359 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.179997 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a8192c61-0b99-47a2-af6c-aee9eff089f1-service-ca\") pod \"console-f9d7485db-cpxlv\" (UID: \"a8192c61-0b99-47a2-af6c-aee9eff089f1\") " pod="openshift-console/console-f9d7485db-cpxlv" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.180017 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f149d971-e11c-471d-91a2-a8e5ed472e41-config\") pod \"machine-api-operator-5694c8668f-7j782\" (UID: \"f149d971-e11c-471d-91a2-a8e5ed472e41\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7j782" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.180222 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
\"kubernetes.io/configmap/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.180339 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/b43bbdc1-9062-4460-85e7-4de472e0fd06-available-featuregates\") pod \"openshift-config-operator-7777fb866f-9d9xc\" (UID: \"b43bbdc1-9062-4460-85e7-4de472e0fd06\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-9d9xc" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.180692 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b8433bf-b4a0-4f65-9691-5da935026105-config\") pod \"authentication-operator-69f744f599-v5nfq\" (UID: \"6b8433bf-b4a0-4f65-9691-5da935026105\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-v5nfq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.182205 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.184068 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/f149d971-e11c-471d-91a2-a8e5ed472e41-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-7j782\" (UID: \"f149d971-e11c-471d-91a2-a8e5ed472e41\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7j782" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.184214 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.184481 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6b8433bf-b4a0-4f65-9691-5da935026105-serving-cert\") pod \"authentication-operator-69f744f599-v5nfq\" (UID: \"6b8433bf-b4a0-4f65-9691-5da935026105\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-v5nfq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.184696 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/518875f1-cfde-4528-b15f-369cd105dc65-auth-proxy-config\") pod \"machine-approver-56656f9798-s2rcp\" (UID: \"518875f1-cfde-4528-b15f-369cd105dc65\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2rcp" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.184811 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/518875f1-cfde-4528-b15f-369cd105dc65-machine-approver-tls\") pod 
\"machine-approver-56656f9798-s2rcp\" (UID: \"518875f1-cfde-4528-b15f-369cd105dc65\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2rcp" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.185476 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f149d971-e11c-471d-91a2-a8e5ed472e41-images\") pod \"machine-api-operator-5694c8668f-7j782\" (UID: \"f149d971-e11c-471d-91a2-a8e5ed472e41\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7j782" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.185863 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/89d62a5e-a294-4eff-b004-1f62339b8f6e-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-cn5sg\" (UID: \"89d62a5e-a294-4eff-b004-1f62339b8f6e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cn5sg" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.186343 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82664343-0cff-4d22-8287-786cc0ce857c-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-p8x6r\" (UID: \"82664343-0cff-4d22-8287-786cc0ce857c\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-p8x6r" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.186353 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/35326ae8-7aba-468e-abf7-aab37519fc34-serving-cert\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.186503 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ba581404-064d-46b7-a930-ab65f4d61a80-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-vdrzc\" (UID: \"ba581404-064d-46b7-a930-ab65f4d61a80\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vdrzc" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.186688 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/35326ae8-7aba-468e-abf7-aab37519fc34-encryption-config\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.186762 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/16dd94f1-87e0-4fbd-910e-af2ece0fd525-serving-cert\") pod \"apiserver-7bbb656c7d-2rn99\" (UID: \"16dd94f1-87e0-4fbd-910e-af2ece0fd525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.186962 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7b2d0b83-b35f-4128-af91-623a6871a431-audit-policies\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.187058 5003 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.187079 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.187241 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.187376 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.187517 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/16dd94f1-87e0-4fbd-910e-af2ece0fd525-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-2rn99\" (UID: \"16dd94f1-87e0-4fbd-910e-af2ece0fd525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.187669 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/35326ae8-7aba-468e-abf7-aab37519fc34-trusted-ca-bundle\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.188034 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/35326ae8-7aba-468e-abf7-aab37519fc34-etcd-client\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.188089 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/96cf2336-9c93-48fe-8d61-c9618714c1b2-client-ca\") pod \"route-controller-manager-6576b87f9c-6w5n8\" (UID: \"96cf2336-9c93-48fe-8d61-c9618714c1b2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6w5n8" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.188297 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a8192c61-0b99-47a2-af6c-aee9eff089f1-console-oauth-config\") pod \"console-f9d7485db-cpxlv\" (UID: \"a8192c61-0b99-47a2-af6c-aee9eff089f1\") " 
pod="openshift-console/console-f9d7485db-cpxlv" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.188342 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-tlchc" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.189848 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a8192c61-0b99-47a2-af6c-aee9eff089f1-console-serving-cert\") pod \"console-f9d7485db-cpxlv\" (UID: \"a8192c61-0b99-47a2-af6c-aee9eff089f1\") " pod="openshift-console/console-f9d7485db-cpxlv" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.190103 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a8192c61-0b99-47a2-af6c-aee9eff089f1-oauth-serving-cert\") pod \"console-f9d7485db-cpxlv\" (UID: \"a8192c61-0b99-47a2-af6c-aee9eff089f1\") " pod="openshift-console/console-f9d7485db-cpxlv" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.190725 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29490405-ljdkv"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.190902 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/16dd94f1-87e0-4fbd-910e-af2ece0fd525-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-2rn99\" (UID: \"16dd94f1-87e0-4fbd-910e-af2ece0fd525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.190991 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-672x8" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.191032 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.191367 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-7zdzf"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.191465 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29490405-ljdkv" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.191461 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/16dd94f1-87e0-4fbd-910e-af2ece0fd525-encryption-config\") pod \"apiserver-7bbb656c7d-2rn99\" (UID: \"16dd94f1-87e0-4fbd-910e-af2ece0fd525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.191921 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-tn4sr"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.192234 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-7zdzf" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.192242 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-bhzbs"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.192443 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6e70d335-0c77-41ee-a1a5-f0d4b7d28bea-serving-cert\") pod \"controller-manager-879f6c89f-sl8wp\" (UID: \"6e70d335-0c77-41ee-a1a5-f0d4b7d28bea\") " pod="openshift-controller-manager/controller-manager-879f6c89f-sl8wp" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.192579 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-tn4sr" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.193222 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.193860 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6w5n8"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.193884 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-7j782"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.193899 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-sl8wp"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.193992 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.194013 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-p8x6r"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.194027 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-v5nfq"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.194037 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-jjlwz"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.194048 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-9d9xc"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.194052 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bhzbs" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.194314 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/16dd94f1-87e0-4fbd-910e-af2ece0fd525-etcd-client\") pod \"apiserver-7bbb656c7d-2rn99\" (UID: \"16dd94f1-87e0-4fbd-910e-af2ece0fd525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.197835 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.197888 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/96cf2336-9c93-48fe-8d61-c9618714c1b2-serving-cert\") pod \"route-controller-manager-6576b87f9c-6w5n8\" (UID: \"96cf2336-9c93-48fe-8d61-c9618714c1b2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6w5n8" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.197915 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-s7vxm"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.200399 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-8wskq"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.200596 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-s7vxm" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.202075 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cn5sg"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.202111 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-zdzvq"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.202177 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-8wskq" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.202302 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.212739 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5kwm8"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.214809 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4n9nm"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.217335 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-cpxlv"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.218580 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sls4l"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.219797 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-s9544"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.220958 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-54rgc"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.222080 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vdrzc"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.223522 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-22wlq"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.224714 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hbddz"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.227552 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-djnsq"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.227737 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-dtlw6"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.229396 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-bhzbs"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.231566 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-rctvj"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.232552 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.233240 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6sl62"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.234453 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-kqsg8"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.238618 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rgpfr"] Jan 26 
10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.239959 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xsgg6"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.241264 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-8wskq"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.242146 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.243046 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-tlchc"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.244299 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-7zdzf"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.245774 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29490405-ljdkv"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.246863 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-672x8"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.247915 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-tn4sr"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.249085 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xphsr"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.250240 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-t55b4"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.251566 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8wf52"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.253221 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2kvjq"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.254601 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-28mrx"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.255768 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-28mrx" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.255980 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-s7vxm"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.257192 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-28mrx"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.258491 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-66jfc"] Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.259590 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-66jfc" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.262904 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.282331 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.302730 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.323216 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.366063 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.389373 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.401938 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.422221 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.443001 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.462187 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.482431 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.508342 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.523484 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.543004 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.563152 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.583785 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.602757 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.622462 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 26 
10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.643003 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.663446 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.683258 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.702768 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.722224 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.743930 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.763186 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.782722 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.802673 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.822448 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.842589 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.863488 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.883366 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.902729 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.922852 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.943008 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.963182 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 26 10:45:21 crc kubenswrapper[5003]: I0126 10:45:21.983055 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.000604 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.000645 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.001080 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.001920 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.022262 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.043430 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.062468 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.109025 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.123209 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.140588 5003 request.go:700] Waited for 1.011317464s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/secrets?fieldSelector=metadata.name%3Dmarketplace-operator-metrics&limit=500&resourceVersion=0 Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.142716 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.163035 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.182638 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.203328 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.223420 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.242736 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.262151 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.282814 5003 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.303203 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.323700 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.342481 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.363017 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.383523 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.421715 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dv726\" (UniqueName: \"kubernetes.io/projected/f149d971-e11c-471d-91a2-a8e5ed472e41-kube-api-access-dv726\") pod \"machine-api-operator-5694c8668f-7j782\" (UID: \"f149d971-e11c-471d-91a2-a8e5ed472e41\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7j782" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.448446 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62pf7\" (UniqueName: \"kubernetes.io/projected/b43bbdc1-9062-4460-85e7-4de472e0fd06-kube-api-access-62pf7\") pod \"openshift-config-operator-7777fb866f-9d9xc\" (UID: \"b43bbdc1-9062-4460-85e7-4de472e0fd06\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-9d9xc" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.466613 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ba581404-064d-46b7-a930-ab65f4d61a80-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-vdrzc\" (UID: \"ba581404-064d-46b7-a930-ab65f4d61a80\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vdrzc" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.480403 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6gzr\" (UniqueName: \"kubernetes.io/projected/518875f1-cfde-4528-b15f-369cd105dc65-kube-api-access-c6gzr\") pod \"machine-approver-56656f9798-s2rcp\" (UID: \"518875f1-cfde-4528-b15f-369cd105dc65\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2rcp" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.501873 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4v8f\" (UniqueName: \"kubernetes.io/projected/6e70d335-0c77-41ee-a1a5-f0d4b7d28bea-kube-api-access-m4v8f\") pod \"controller-manager-879f6c89f-sl8wp\" (UID: \"6e70d335-0c77-41ee-a1a5-f0d4b7d28bea\") " pod="openshift-controller-manager/controller-manager-879f6c89f-sl8wp" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.503487 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-sl8wp" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.512724 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-7j782" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.521579 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5dfvd\" (UniqueName: \"kubernetes.io/projected/6b8433bf-b4a0-4f65-9691-5da935026105-kube-api-access-5dfvd\") pod \"authentication-operator-69f744f599-v5nfq\" (UID: \"6b8433bf-b4a0-4f65-9691-5da935026105\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-v5nfq" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.539008 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hh4t6\" (UniqueName: \"kubernetes.io/projected/7b2d0b83-b35f-4128-af91-623a6871a431-kube-api-access-hh4t6\") pod \"oauth-openshift-558db77b4-22wlq\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.562032 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dp7gz\" (UniqueName: \"kubernetes.io/projected/96cf2336-9c93-48fe-8d61-c9618714c1b2-kube-api-access-dp7gz\") pod \"route-controller-manager-6576b87f9c-6w5n8\" (UID: \"96cf2336-9c93-48fe-8d61-c9618714c1b2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6w5n8" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.562421 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.581111 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pkqqk\" (UniqueName: \"kubernetes.io/projected/89d62a5e-a294-4eff-b004-1f62339b8f6e-kube-api-access-pkqqk\") pod \"kube-storage-version-migrator-operator-b67b599dd-cn5sg\" (UID: \"89d62a5e-a294-4eff-b004-1f62339b8f6e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cn5sg" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.607868 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-glvh2\" (UniqueName: \"kubernetes.io/projected/16dd94f1-87e0-4fbd-910e-af2ece0fd525-kube-api-access-glvh2\") pod \"apiserver-7bbb656c7d-2rn99\" (UID: \"16dd94f1-87e0-4fbd-910e-af2ece0fd525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.629573 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2rcp" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.631843 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xcbh\" (UniqueName: \"kubernetes.io/projected/35326ae8-7aba-468e-abf7-aab37519fc34-kube-api-access-2xcbh\") pod \"apiserver-76f77b778f-jjlwz\" (UID: \"35326ae8-7aba-468e-abf7-aab37519fc34\") " pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.638997 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vdrzc" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.642670 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.646828 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlltl\" (UniqueName: \"kubernetes.io/projected/a8192c61-0b99-47a2-af6c-aee9eff089f1-kube-api-access-dlltl\") pod \"console-f9d7485db-cpxlv\" (UID: \"a8192c61-0b99-47a2-af6c-aee9eff089f1\") " pod="openshift-console/console-f9d7485db-cpxlv" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.661455 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-cpxlv" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.662924 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.677897 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cn5sg" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.682866 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.702845 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.712224 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-7j782"] Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.722606 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 26 10:45:22 crc kubenswrapper[5003]: W0126 10:45:22.724230 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf149d971_e11c_471d_91a2_a8e5ed472e41.slice/crio-0cee7152c930ca5646b823655f8b51dc1d543eddddd235bbabb4e274a1ff01c9 WatchSource:0}: Error finding container 0cee7152c930ca5646b823655f8b51dc1d543eddddd235bbabb4e274a1ff01c9: Status 404 returned error can't find the container with id 0cee7152c930ca5646b823655f8b51dc1d543eddddd235bbabb4e274a1ff01c9 Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.740479 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-9d9xc" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.742077 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.749312 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-v5nfq" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.755305 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vz2kl\" (UniqueName: \"kubernetes.io/projected/82664343-0cff-4d22-8287-786cc0ce857c-kube-api-access-vz2kl\") pod \"openshift-apiserver-operator-796bbdcf4f-p8x6r\" (UID: \"82664343-0cff-4d22-8287-786cc0ce857c\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-p8x6r" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.762870 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.782736 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.787545 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6w5n8" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.791451 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-22wlq"] Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.802818 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.822443 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.823877 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.842203 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.862208 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.883936 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.888257 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-cpxlv"] Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.908060 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vdrzc"] Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.912745 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-p8x6r" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.914232 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.923494 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.938008 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cn5sg"] Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.941806 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.951032 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-sl8wp"] Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.962732 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 26 10:45:22 crc kubenswrapper[5003]: I0126 10:45:22.982588 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 26 10:45:23 crc kubenswrapper[5003]: I0126 10:45:23.002479 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 26 10:45:23 crc kubenswrapper[5003]: I0126 10:45:23.008205 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:45:23 crc kubenswrapper[5003]: I0126 10:45:23.022179 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 26 10:45:23 crc kubenswrapper[5003]: I0126 10:45:23.043134 5003 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 26 10:45:23 crc kubenswrapper[5003]: I0126 10:45:23.062524 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 26 10:45:23 crc kubenswrapper[5003]: I0126 10:45:23.083152 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 26 10:45:23 crc kubenswrapper[5003]: I0126 10:45:23.102562 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 26 10:45:23 crc kubenswrapper[5003]: I0126 10:45:23.122103 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 26 10:45:23 crc kubenswrapper[5003]: I0126 10:45:23.141047 5003 request.go:700] Waited for 1.92835915s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress-canary/secrets?fieldSelector=metadata.name%3Dcanary-serving-cert&limit=500&resourceVersion=0 Jan 26 10:45:23 crc kubenswrapper[5003]: I0126 10:45:23.142471 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 26 10:45:23 crc kubenswrapper[5003]: I0126 
10:45:23.162524 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 26 10:45:23 crc kubenswrapper[5003]: I0126 10:45:23.183369 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 26 10:45:23 crc kubenswrapper[5003]: I0126 10:45:23.203479 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 26 10:45:23 crc kubenswrapper[5003]: I0126 10:45:23.223440 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 26 10:45:23 crc kubenswrapper[5003]: I0126 10:45:23.243426 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 26 10:45:23 crc kubenswrapper[5003]: I0126 10:45:23.263509 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 26 10:45:23 crc kubenswrapper[5003]: I0126 10:45:23.303265 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 26 10:45:23 crc kubenswrapper[5003]: I0126 10:45:23.322834 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 26 10:45:23 crc kubenswrapper[5003]: I0126 10:45:23.342537 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 26 10:45:23 crc kubenswrapper[5003]: I0126 10:45:23.362971 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.273545 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-registry-certificates\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.273704 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.273844 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-ca-trust-extracted\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.273902 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-trusted-ca\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.273994 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-registry-tls\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:25 crc kubenswrapper[5003]: E0126 10:45:25.274878 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:25.774849269 +0000 UTC m=+141.316074840 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.294889 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-7j782" event={"ID":"f149d971-e11c-471d-91a2-a8e5ed472e41","Type":"ContainerStarted","Data":"0cee7152c930ca5646b823655f8b51dc1d543eddddd235bbabb4e274a1ff01c9"} Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.294935 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2rcp" event={"ID":"518875f1-cfde-4528-b15f-369cd105dc65","Type":"ContainerStarted","Data":"d632a719b44bef7dd6e10d08577735ce712bf4548b6a2291a671c24dd5d06167"} Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.375500 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:25 crc kubenswrapper[5003]: E0126 10:45:25.376090 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:25.876054316 +0000 UTC m=+141.417279887 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.376164 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/cace13a7-d60e-4a21-b606-e6cacde8ad36-etcd-ca\") pod \"etcd-operator-b45778765-kqsg8\" (UID: \"cace13a7-d60e-4a21-b606-e6cacde8ad36\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kqsg8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.376189 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2djc\" (UniqueName: \"kubernetes.io/projected/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-kube-api-access-f2djc\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.376274 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7cbbr\" (UniqueName: \"kubernetes.io/projected/cace13a7-d60e-4a21-b606-e6cacde8ad36-kube-api-access-7cbbr\") pod \"etcd-operator-b45778765-kqsg8\" (UID: \"cace13a7-d60e-4a21-b606-e6cacde8ad36\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kqsg8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.376308 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4e429f86-b4b3-4de7-8556-ce5973ea48e0-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-hbddz\" (UID: \"4e429f86-b4b3-4de7-8556-ce5973ea48e0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hbddz" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.376358 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/cace13a7-d60e-4a21-b606-e6cacde8ad36-etcd-service-ca\") pod \"etcd-operator-b45778765-kqsg8\" (UID: \"cace13a7-d60e-4a21-b606-e6cacde8ad36\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kqsg8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.376416 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-registry-certificates\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.376432 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/28dfb8e1-70f1-46b7-887a-c2ae4a892f60-metrics-tls\") pod \"ingress-operator-5b745b69d9-dtlw6\" (UID: \"28dfb8e1-70f1-46b7-887a-c2ae4a892f60\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtlw6" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.376494 5003 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.376513 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cace13a7-d60e-4a21-b606-e6cacde8ad36-serving-cert\") pod \"etcd-operator-b45778765-kqsg8\" (UID: \"cace13a7-d60e-4a21-b606-e6cacde8ad36\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kqsg8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.376542 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-ca-trust-extracted\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.376560 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/c152b47d-1462-4bec-9048-37ce680c0d19-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-5kwm8\" (UID: \"c152b47d-1462-4bec-9048-37ce680c0d19\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5kwm8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.376580 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1739818d-5558-4901-9b9d-3d735f5f30e3-metrics-tls\") pod \"dns-operator-744455d44c-rctvj\" (UID: \"1739818d-5558-4901-9b9d-3d735f5f30e3\") " pod="openshift-dns-operator/dns-operator-744455d44c-rctvj" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.376631 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rrdfv\" (UniqueName: \"kubernetes.io/projected/89d01f2e-05b5-4a11-b1f5-784b2a924c66-kube-api-access-rrdfv\") pod \"package-server-manager-789f6589d5-4n9nm\" (UID: \"89d01f2e-05b5-4a11-b1f5-784b2a924c66\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4n9nm" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.376674 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3dc4301b-5dc4-4e39-a74b-9e46542e8dfb-metrics-certs\") pod \"router-default-5444994796-grvqx\" (UID: \"3dc4301b-5dc4-4e39-a74b-9e46542e8dfb\") " pod="openshift-ingress/router-default-5444994796-grvqx" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.376693 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/eb1caca0-0426-492d-b7bc-7a074b5e86ca-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-t55b4\" (UID: \"eb1caca0-0426-492d-b7bc-7a074b5e86ca\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t55b4" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 
10:45:25.376708 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-bound-sa-token\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.376724 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2lg8\" (UniqueName: \"kubernetes.io/projected/eb1caca0-0426-492d-b7bc-7a074b5e86ca-kube-api-access-v2lg8\") pod \"machine-config-controller-84d6567774-t55b4\" (UID: \"eb1caca0-0426-492d-b7bc-7a074b5e86ca\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t55b4" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.376783 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vz5wc\" (UniqueName: \"kubernetes.io/projected/c152b47d-1462-4bec-9048-37ce680c0d19-kube-api-access-vz5wc\") pod \"control-plane-machine-set-operator-78cbb6b69f-5kwm8\" (UID: \"c152b47d-1462-4bec-9048-37ce680c0d19\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5kwm8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.376803 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/28dfb8e1-70f1-46b7-887a-c2ae4a892f60-trusted-ca\") pod \"ingress-operator-5b745b69d9-dtlw6\" (UID: \"28dfb8e1-70f1-46b7-887a-c2ae4a892f60\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtlw6" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.376841 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-registry-tls\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.376861 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/4e429f86-b4b3-4de7-8556-ce5973ea48e0-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-hbddz\" (UID: \"4e429f86-b4b3-4de7-8556-ce5973ea48e0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hbddz" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.376883 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/3dc4301b-5dc4-4e39-a74b-9e46542e8dfb-default-certificate\") pod \"router-default-5444994796-grvqx\" (UID: \"3dc4301b-5dc4-4e39-a74b-9e46542e8dfb\") " pod="openshift-ingress/router-default-5444994796-grvqx" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.376919 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ld857\" (UniqueName: \"kubernetes.io/projected/d95fc008-94a6-40af-b7bf-a55d2920775c-kube-api-access-ld857\") pod \"olm-operator-6b444d44fb-djnsq\" (UID: \"d95fc008-94a6-40af-b7bf-a55d2920775c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-djnsq" Jan 26 10:45:25 crc 
kubenswrapper[5003]: I0126 10:45:25.376940 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3dc4301b-5dc4-4e39-a74b-9e46542e8dfb-service-ca-bundle\") pod \"router-default-5444994796-grvqx\" (UID: \"3dc4301b-5dc4-4e39-a74b-9e46542e8dfb\") " pod="openshift-ingress/router-default-5444994796-grvqx" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.376959 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wzjq\" (UniqueName: \"kubernetes.io/projected/28dfb8e1-70f1-46b7-887a-c2ae4a892f60-kube-api-access-2wzjq\") pod \"ingress-operator-5b745b69d9-dtlw6\" (UID: \"28dfb8e1-70f1-46b7-887a-c2ae4a892f60\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtlw6" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.376979 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4x8r\" (UniqueName: \"kubernetes.io/projected/4e429f86-b4b3-4de7-8556-ce5973ea48e0-kube-api-access-h4x8r\") pod \"cluster-image-registry-operator-dc59b4c8b-hbddz\" (UID: \"4e429f86-b4b3-4de7-8556-ce5973ea48e0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hbddz" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.377015 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pvkr\" (UniqueName: \"kubernetes.io/projected/d3839473-1e0c-4987-a025-7be16d2e6006-kube-api-access-9pvkr\") pod \"cluster-samples-operator-665b6dd947-8wf52\" (UID: \"d3839473-1e0c-4987-a025-7be16d2e6006\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8wf52" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.377047 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/beba071b-8a4b-4aae-862c-793e659eaf30-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6sl62\" (UID: \"beba071b-8a4b-4aae-862c-793e659eaf30\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6sl62" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.377069 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/eb1caca0-0426-492d-b7bc-7a074b5e86ca-proxy-tls\") pod \"machine-config-controller-84d6567774-t55b4\" (UID: \"eb1caca0-0426-492d-b7bc-7a074b5e86ca\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t55b4" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.377117 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/3dc4301b-5dc4-4e39-a74b-9e46542e8dfb-stats-auth\") pod \"router-default-5444994796-grvqx\" (UID: \"3dc4301b-5dc4-4e39-a74b-9e46542e8dfb\") " pod="openshift-ingress/router-default-5444994796-grvqx" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.377144 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-installation-pull-secrets\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.377167 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/beba071b-8a4b-4aae-862c-793e659eaf30-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6sl62\" (UID: \"beba071b-8a4b-4aae-862c-793e659eaf30\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6sl62" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.377189 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-425sk\" (UniqueName: \"kubernetes.io/projected/1739818d-5558-4901-9b9d-3d735f5f30e3-kube-api-access-425sk\") pod \"dns-operator-744455d44c-rctvj\" (UID: \"1739818d-5558-4901-9b9d-3d735f5f30e3\") " pod="openshift-dns-operator/dns-operator-744455d44c-rctvj" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.377211 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/25c939a4-4ebb-4ce6-a99d-3e9108c444cd-trusted-ca\") pod \"console-operator-58897d9998-s9544\" (UID: \"25c939a4-4ebb-4ce6-a99d-3e9108c444cd\") " pod="openshift-console-operator/console-operator-58897d9998-s9544" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.377233 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64d00de5-3879-4536-84a1-1c305b47c321-config\") pod \"kube-controller-manager-operator-78b949d7b-sls4l\" (UID: \"64d00de5-3879-4536-84a1-1c305b47c321\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sls4l" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.377253 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/25c939a4-4ebb-4ce6-a99d-3e9108c444cd-serving-cert\") pod \"console-operator-58897d9998-s9544\" (UID: \"25c939a4-4ebb-4ce6-a99d-3e9108c444cd\") " pod="openshift-console-operator/console-operator-58897d9998-s9544" Jan 26 10:45:25 crc kubenswrapper[5003]: E0126 10:45:25.377304 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:25.877274381 +0000 UTC m=+141.418500022 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.377370 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9wn56\" (UniqueName: \"kubernetes.io/projected/3dc4301b-5dc4-4e39-a74b-9e46542e8dfb-kube-api-access-9wn56\") pod \"router-default-5444994796-grvqx\" (UID: \"3dc4301b-5dc4-4e39-a74b-9e46542e8dfb\") " pod="openshift-ingress/router-default-5444994796-grvqx" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.377420 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cace13a7-d60e-4a21-b606-e6cacde8ad36-config\") pod \"etcd-operator-b45778765-kqsg8\" (UID: \"cace13a7-d60e-4a21-b606-e6cacde8ad36\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kqsg8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.377481 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/89d01f2e-05b5-4a11-b1f5-784b2a924c66-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-4n9nm\" (UID: \"89d01f2e-05b5-4a11-b1f5-784b2a924c66\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4n9nm" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.377508 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d95fc008-94a6-40af-b7bf-a55d2920775c-profile-collector-cert\") pod \"olm-operator-6b444d44fb-djnsq\" (UID: \"d95fc008-94a6-40af-b7bf-a55d2920775c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-djnsq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.377525 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/cace13a7-d60e-4a21-b606-e6cacde8ad36-etcd-client\") pod \"etcd-operator-b45778765-kqsg8\" (UID: \"cace13a7-d60e-4a21-b606-e6cacde8ad36\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kqsg8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.377606 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/d3839473-1e0c-4987-a025-7be16d2e6006-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-8wf52\" (UID: \"d3839473-1e0c-4987-a025-7be16d2e6006\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8wf52" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.379811 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/d95fc008-94a6-40af-b7bf-a55d2920775c-srv-cert\") pod \"olm-operator-6b444d44fb-djnsq\" (UID: \"d95fc008-94a6-40af-b7bf-a55d2920775c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-djnsq" Jan 
26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.379855 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-trusted-ca\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.379894 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cblk6\" (UniqueName: \"kubernetes.io/projected/25c939a4-4ebb-4ce6-a99d-3e9108c444cd-kube-api-access-cblk6\") pod \"console-operator-58897d9998-s9544\" (UID: \"25c939a4-4ebb-4ce6-a99d-3e9108c444cd\") " pod="openshift-console-operator/console-operator-58897d9998-s9544" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.379946 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/64d00de5-3879-4536-84a1-1c305b47c321-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-sls4l\" (UID: \"64d00de5-3879-4536-84a1-1c305b47c321\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sls4l" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.380008 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25c939a4-4ebb-4ce6-a99d-3e9108c444cd-config\") pod \"console-operator-58897d9998-s9544\" (UID: \"25c939a4-4ebb-4ce6-a99d-3e9108c444cd\") " pod="openshift-console-operator/console-operator-58897d9998-s9544" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.380402 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/64d00de5-3879-4536-84a1-1c305b47c321-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-sls4l\" (UID: \"64d00de5-3879-4536-84a1-1c305b47c321\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sls4l" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.380447 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/beba071b-8a4b-4aae-862c-793e659eaf30-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6sl62\" (UID: \"beba071b-8a4b-4aae-862c-793e659eaf30\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6sl62" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.380478 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/28dfb8e1-70f1-46b7-887a-c2ae4a892f60-bound-sa-token\") pod \"ingress-operator-5b745b69d9-dtlw6\" (UID: \"28dfb8e1-70f1-46b7-887a-c2ae4a892f60\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtlw6" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.380577 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4e429f86-b4b3-4de7-8556-ce5973ea48e0-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-hbddz\" (UID: \"4e429f86-b4b3-4de7-8556-ce5973ea48e0\") " 
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hbddz" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.386131 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-ca-trust-extracted\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.389402 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-registry-tls\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.481328 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:25 crc kubenswrapper[5003]: E0126 10:45:25.481541 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:25.981496183 +0000 UTC m=+141.522721744 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.481618 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-installation-pull-secrets\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.481653 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/beba071b-8a4b-4aae-862c-793e659eaf30-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6sl62\" (UID: \"beba071b-8a4b-4aae-862c-793e659eaf30\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6sl62" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.481702 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd207bc0-8b94-46b2-acb0-ac9db8d1e0aa-config\") pod \"service-ca-operator-777779d784-672x8\" (UID: \"cd207bc0-8b94-46b2-acb0-ac9db8d1e0aa\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-672x8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.481723 5003 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-425sk\" (UniqueName: \"kubernetes.io/projected/1739818d-5558-4901-9b9d-3d735f5f30e3-kube-api-access-425sk\") pod \"dns-operator-744455d44c-rctvj\" (UID: \"1739818d-5558-4901-9b9d-3d735f5f30e3\") " pod="openshift-dns-operator/dns-operator-744455d44c-rctvj" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.481774 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/25c939a4-4ebb-4ce6-a99d-3e9108c444cd-trusted-ca\") pod \"console-operator-58897d9998-s9544\" (UID: \"25c939a4-4ebb-4ce6-a99d-3e9108c444cd\") " pod="openshift-console-operator/console-operator-58897d9998-s9544" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.481800 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64d00de5-3879-4536-84a1-1c305b47c321-config\") pod \"kube-controller-manager-operator-78b949d7b-sls4l\" (UID: \"64d00de5-3879-4536-84a1-1c305b47c321\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sls4l" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.481819 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/7dcb2031-bbdf-4c68-9d63-694bd2907756-csi-data-dir\") pod \"csi-hostpathplugin-s7vxm\" (UID: \"7dcb2031-bbdf-4c68-9d63-694bd2907756\") " pod="hostpath-provisioner/csi-hostpathplugin-s7vxm" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.482422 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/25c939a4-4ebb-4ce6-a99d-3e9108c444cd-serving-cert\") pod \"console-operator-58897d9998-s9544\" (UID: \"25c939a4-4ebb-4ce6-a99d-3e9108c444cd\") " pod="openshift-console-operator/console-operator-58897d9998-s9544" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.482454 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9wn56\" (UniqueName: \"kubernetes.io/projected/3dc4301b-5dc4-4e39-a74b-9e46542e8dfb-kube-api-access-9wn56\") pod \"router-default-5444994796-grvqx\" (UID: \"3dc4301b-5dc4-4e39-a74b-9e46542e8dfb\") " pod="openshift-ingress/router-default-5444994796-grvqx" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.482488 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2100df7-b013-4d8a-8ab4-18e2506bdd02-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-54rgc\" (UID: \"d2100df7-b013-4d8a-8ab4-18e2506bdd02\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-54rgc" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.482508 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a3f59cd7-44a7-4d88-a8bb-7108b70efa58-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-xsgg6\" (UID: \"a3f59cd7-44a7-4d88-a8bb-7108b70efa58\") " pod="openshift-marketplace/marketplace-operator-79b997595-xsgg6" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.482528 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/cace13a7-d60e-4a21-b606-e6cacde8ad36-config\") pod \"etcd-operator-b45778765-kqsg8\" (UID: \"cace13a7-d60e-4a21-b606-e6cacde8ad36\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kqsg8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.482546 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9clhp\" (UniqueName: \"kubernetes.io/projected/cd207bc0-8b94-46b2-acb0-ac9db8d1e0aa-kube-api-access-9clhp\") pod \"service-ca-operator-777779d784-672x8\" (UID: \"cd207bc0-8b94-46b2-acb0-ac9db8d1e0aa\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-672x8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.482591 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sx7nz\" (UniqueName: \"kubernetes.io/projected/0d96d414-365e-41ec-bbd0-02e8d36271be-kube-api-access-sx7nz\") pod \"service-ca-9c57cc56f-tlchc\" (UID: \"0d96d414-365e-41ec-bbd0-02e8d36271be\") " pod="openshift-service-ca/service-ca-9c57cc56f-tlchc" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.482610 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/89d01f2e-05b5-4a11-b1f5-784b2a924c66-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-4n9nm\" (UID: \"89d01f2e-05b5-4a11-b1f5-784b2a924c66\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4n9nm" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.482681 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d95fc008-94a6-40af-b7bf-a55d2920775c-profile-collector-cert\") pod \"olm-operator-6b444d44fb-djnsq\" (UID: \"d95fc008-94a6-40af-b7bf-a55d2920775c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-djnsq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.482722 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/0d96d414-365e-41ec-bbd0-02e8d36271be-signing-key\") pod \"service-ca-9c57cc56f-tlchc\" (UID: \"0d96d414-365e-41ec-bbd0-02e8d36271be\") " pod="openshift-service-ca/service-ca-9c57cc56f-tlchc" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.482749 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/cace13a7-d60e-4a21-b606-e6cacde8ad36-etcd-client\") pod \"etcd-operator-b45778765-kqsg8\" (UID: \"cace13a7-d60e-4a21-b606-e6cacde8ad36\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kqsg8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.482769 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2d39e382-cc5b-4e9e-b66f-87fe37efbd4f-config-volume\") pod \"collect-profiles-29490405-ljdkv\" (UID: \"2d39e382-cc5b-4e9e-b66f-87fe37efbd4f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490405-ljdkv" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.482812 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/7dcb2031-bbdf-4c68-9d63-694bd2907756-plugins-dir\") pod 
\"csi-hostpathplugin-s7vxm\" (UID: \"7dcb2031-bbdf-4c68-9d63-694bd2907756\") " pod="hostpath-provisioner/csi-hostpathplugin-s7vxm" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.482848 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/d3839473-1e0c-4987-a025-7be16d2e6006-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-8wf52\" (UID: \"d3839473-1e0c-4987-a025-7be16d2e6006\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8wf52" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.482910 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/d95fc008-94a6-40af-b7bf-a55d2920775c-srv-cert\") pod \"olm-operator-6b444d44fb-djnsq\" (UID: \"d95fc008-94a6-40af-b7bf-a55d2920775c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-djnsq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.482925 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64d00de5-3879-4536-84a1-1c305b47c321-config\") pod \"kube-controller-manager-operator-78b949d7b-sls4l\" (UID: \"64d00de5-3879-4536-84a1-1c305b47c321\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sls4l" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.482971 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cblk6\" (UniqueName: \"kubernetes.io/projected/25c939a4-4ebb-4ce6-a99d-3e9108c444cd-kube-api-access-cblk6\") pod \"console-operator-58897d9998-s9544\" (UID: \"25c939a4-4ebb-4ce6-a99d-3e9108c444cd\") " pod="openshift-console-operator/console-operator-58897d9998-s9544" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.482995 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4fx8k\" (UniqueName: \"kubernetes.io/projected/bf9d71da-10fb-416e-919d-291fd4267dc9-kube-api-access-4fx8k\") pod \"multus-admission-controller-857f4d67dd-7zdzf\" (UID: \"bf9d71da-10fb-416e-919d-291fd4267dc9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-7zdzf" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.483014 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/64d00de5-3879-4536-84a1-1c305b47c321-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-sls4l\" (UID: \"64d00de5-3879-4536-84a1-1c305b47c321\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sls4l" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.483057 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25c939a4-4ebb-4ce6-a99d-3e9108c444cd-config\") pod \"console-operator-58897d9998-s9544\" (UID: \"25c939a4-4ebb-4ce6-a99d-3e9108c444cd\") " pod="openshift-console-operator/console-operator-58897d9998-s9544" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.483074 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ndfc\" (UniqueName: \"kubernetes.io/projected/d2100df7-b013-4d8a-8ab4-18e2506bdd02-kube-api-access-6ndfc\") pod \"openshift-controller-manager-operator-756b6f6bc6-54rgc\" (UID: 
\"d2100df7-b013-4d8a-8ab4-18e2506bdd02\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-54rgc" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.483089 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/94f616e2-cfa7-4b54-b6f8-4b07df5b714f-auth-proxy-config\") pod \"machine-config-operator-74547568cd-zdzvq\" (UID: \"94f616e2-cfa7-4b54-b6f8-4b07df5b714f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zdzvq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.483126 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/64d00de5-3879-4536-84a1-1c305b47c321-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-sls4l\" (UID: \"64d00de5-3879-4536-84a1-1c305b47c321\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sls4l" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.483144 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/7dcb2031-bbdf-4c68-9d63-694bd2907756-socket-dir\") pod \"csi-hostpathplugin-s7vxm\" (UID: \"7dcb2031-bbdf-4c68-9d63-694bd2907756\") " pod="hostpath-provisioner/csi-hostpathplugin-s7vxm" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.483161 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/beba071b-8a4b-4aae-862c-793e659eaf30-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6sl62\" (UID: \"beba071b-8a4b-4aae-862c-793e659eaf30\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6sl62" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.483219 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ngzzk\" (UniqueName: \"kubernetes.io/projected/0118bd40-a7ee-4622-913b-7395962ac6b8-kube-api-access-ngzzk\") pod \"dns-default-28mrx\" (UID: \"0118bd40-a7ee-4622-913b-7395962ac6b8\") " pod="openshift-dns/dns-default-28mrx" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.483848 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/28dfb8e1-70f1-46b7-887a-c2ae4a892f60-bound-sa-token\") pod \"ingress-operator-5b745b69d9-dtlw6\" (UID: \"28dfb8e1-70f1-46b7-887a-c2ae4a892f60\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtlw6" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.483886 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4e429f86-b4b3-4de7-8556-ce5973ea48e0-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-hbddz\" (UID: \"4e429f86-b4b3-4de7-8556-ce5973ea48e0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hbddz" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.483960 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d768c952-394c-4b6e-b4d9-7dbc838cefac-apiservice-cert\") pod \"packageserver-d55dfcdfc-xphsr\" (UID: \"d768c952-394c-4b6e-b4d9-7dbc838cefac\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xphsr" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.483984 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/7dcb2031-bbdf-4c68-9d63-694bd2907756-mountpoint-dir\") pod \"csi-hostpathplugin-s7vxm\" (UID: \"7dcb2031-bbdf-4c68-9d63-694bd2907756\") " pod="hostpath-provisioner/csi-hostpathplugin-s7vxm" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.484007 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/cace13a7-d60e-4a21-b606-e6cacde8ad36-etcd-ca\") pod \"etcd-operator-b45778765-kqsg8\" (UID: \"cace13a7-d60e-4a21-b606-e6cacde8ad36\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kqsg8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.484028 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/94f616e2-cfa7-4b54-b6f8-4b07df5b714f-images\") pod \"machine-config-operator-74547568cd-zdzvq\" (UID: \"94f616e2-cfa7-4b54-b6f8-4b07df5b714f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zdzvq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.484047 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/94f616e2-cfa7-4b54-b6f8-4b07df5b714f-proxy-tls\") pod \"machine-config-operator-74547568cd-zdzvq\" (UID: \"94f616e2-cfa7-4b54-b6f8-4b07df5b714f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zdzvq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.484065 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/bf9d71da-10fb-416e-919d-291fd4267dc9-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-7zdzf\" (UID: \"bf9d71da-10fb-416e-919d-291fd4267dc9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-7zdzf" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.484087 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2djc\" (UniqueName: \"kubernetes.io/projected/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-kube-api-access-f2djc\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.484106 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d62nh\" (UniqueName: \"kubernetes.io/projected/ab780a4c-5932-40c1-9383-f3d42238d2ac-kube-api-access-d62nh\") pod \"downloads-7954f5f757-tn4sr\" (UID: \"ab780a4c-5932-40c1-9383-f3d42238d2ac\") " pod="openshift-console/downloads-7954f5f757-tn4sr" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.484122 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/0118bd40-a7ee-4622-913b-7395962ac6b8-metrics-tls\") pod \"dns-default-28mrx\" (UID: \"0118bd40-a7ee-4622-913b-7395962ac6b8\") " pod="openshift-dns/dns-default-28mrx" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.484141 5003 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/8479f1ce-9bbc-44ba-b73d-7c2cb1929f8b-srv-cert\") pod \"catalog-operator-68c6474976-rgpfr\" (UID: \"8479f1ce-9bbc-44ba-b73d-7c2cb1929f8b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rgpfr" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.484211 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7cbbr\" (UniqueName: \"kubernetes.io/projected/cace13a7-d60e-4a21-b606-e6cacde8ad36-kube-api-access-7cbbr\") pod \"etcd-operator-b45778765-kqsg8\" (UID: \"cace13a7-d60e-4a21-b606-e6cacde8ad36\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kqsg8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.484229 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4e429f86-b4b3-4de7-8556-ce5973ea48e0-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-hbddz\" (UID: \"4e429f86-b4b3-4de7-8556-ce5973ea48e0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hbddz" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.484248 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2100df7-b013-4d8a-8ab4-18e2506bdd02-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-54rgc\" (UID: \"d2100df7-b013-4d8a-8ab4-18e2506bdd02\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-54rgc" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.484291 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a3f59cd7-44a7-4d88-a8bb-7108b70efa58-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-xsgg6\" (UID: \"a3f59cd7-44a7-4d88-a8bb-7108b70efa58\") " pod="openshift-marketplace/marketplace-operator-79b997595-xsgg6" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.484295 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/25c939a4-4ebb-4ce6-a99d-3e9108c444cd-trusted-ca\") pod \"console-operator-58897d9998-s9544\" (UID: \"25c939a4-4ebb-4ce6-a99d-3e9108c444cd\") " pod="openshift-console-operator/console-operator-58897d9998-s9544" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.484311 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zrlf\" (UniqueName: \"kubernetes.io/projected/50c0df72-0f65-4ed5-a78b-89bfde4e3960-kube-api-access-6zrlf\") pod \"machine-config-server-66jfc\" (UID: \"50c0df72-0f65-4ed5-a78b-89bfde4e3960\") " pod="openshift-machine-config-operator/machine-config-server-66jfc" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.484331 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/cace13a7-d60e-4a21-b606-e6cacde8ad36-etcd-service-ca\") pod \"etcd-operator-b45778765-kqsg8\" (UID: \"cace13a7-d60e-4a21-b606-e6cacde8ad36\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kqsg8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.484939 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/cace13a7-d60e-4a21-b606-e6cacde8ad36-config\") pod \"etcd-operator-b45778765-kqsg8\" (UID: \"cace13a7-d60e-4a21-b606-e6cacde8ad36\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kqsg8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.485396 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/beba071b-8a4b-4aae-862c-793e659eaf30-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6sl62\" (UID: \"beba071b-8a4b-4aae-862c-793e659eaf30\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6sl62" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.486208 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/cace13a7-d60e-4a21-b606-e6cacde8ad36-etcd-ca\") pod \"etcd-operator-b45778765-kqsg8\" (UID: \"cace13a7-d60e-4a21-b606-e6cacde8ad36\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kqsg8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.486239 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4e429f86-b4b3-4de7-8556-ce5973ea48e0-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-hbddz\" (UID: \"4e429f86-b4b3-4de7-8556-ce5973ea48e0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hbddz" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.486445 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/28dfb8e1-70f1-46b7-887a-c2ae4a892f60-metrics-tls\") pod \"ingress-operator-5b745b69d9-dtlw6\" (UID: \"28dfb8e1-70f1-46b7-887a-c2ae4a892f60\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtlw6" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.486532 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/0d96d414-365e-41ec-bbd0-02e8d36271be-signing-cabundle\") pod \"service-ca-9c57cc56f-tlchc\" (UID: \"0d96d414-365e-41ec-bbd0-02e8d36271be\") " pod="openshift-service-ca/service-ca-9c57cc56f-tlchc" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.486590 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0118bd40-a7ee-4622-913b-7395962ac6b8-config-volume\") pod \"dns-default-28mrx\" (UID: \"0118bd40-a7ee-4622-913b-7395962ac6b8\") " pod="openshift-dns/dns-default-28mrx" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.486638 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.486708 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d768c952-394c-4b6e-b4d9-7dbc838cefac-webhook-cert\") pod \"packageserver-d55dfcdfc-xphsr\" (UID: \"d768c952-394c-4b6e-b4d9-7dbc838cefac\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xphsr" Jan 26 10:45:25 crc kubenswrapper[5003]: E0126 10:45:25.486954 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:25.986941279 +0000 UTC m=+141.528166950 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.487344 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/c152b47d-1462-4bec-9048-37ce680c0d19-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-5kwm8\" (UID: \"c152b47d-1462-4bec-9048-37ce680c0d19\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5kwm8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.487393 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1739818d-5558-4901-9b9d-3d735f5f30e3-metrics-tls\") pod \"dns-operator-744455d44c-rctvj\" (UID: \"1739818d-5558-4901-9b9d-3d735f5f30e3\") " pod="openshift-dns-operator/dns-operator-744455d44c-rctvj" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.487416 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cace13a7-d60e-4a21-b606-e6cacde8ad36-serving-cert\") pod \"etcd-operator-b45778765-kqsg8\" (UID: \"cace13a7-d60e-4a21-b606-e6cacde8ad36\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kqsg8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.487474 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2d39e382-cc5b-4e9e-b66f-87fe37efbd4f-secret-volume\") pod \"collect-profiles-29490405-ljdkv\" (UID: \"2d39e382-cc5b-4e9e-b66f-87fe37efbd4f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490405-ljdkv" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.487483 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/cace13a7-d60e-4a21-b606-e6cacde8ad36-etcd-client\") pod \"etcd-operator-b45778765-kqsg8\" (UID: \"cace13a7-d60e-4a21-b606-e6cacde8ad36\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kqsg8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.488415 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/25c939a4-4ebb-4ce6-a99d-3e9108c444cd-serving-cert\") pod \"console-operator-58897d9998-s9544\" (UID: \"25c939a4-4ebb-4ce6-a99d-3e9108c444cd\") " pod="openshift-console-operator/console-operator-58897d9998-s9544" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.488571 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/8479f1ce-9bbc-44ba-b73d-7c2cb1929f8b-profile-collector-cert\") pod \"catalog-operator-68c6474976-rgpfr\" (UID: \"8479f1ce-9bbc-44ba-b73d-7c2cb1929f8b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rgpfr" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.488826 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0a8e5c8f-a945-4071-9e4b-eaffcc05b4f7-cert\") pod \"ingress-canary-8wskq\" (UID: \"0a8e5c8f-a945-4071-9e4b-eaffcc05b4f7\") " pod="openshift-ingress-canary/ingress-canary-8wskq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.488871 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rrdfv\" (UniqueName: \"kubernetes.io/projected/89d01f2e-05b5-4a11-b1f5-784b2a924c66-kube-api-access-rrdfv\") pod \"package-server-manager-789f6589d5-4n9nm\" (UID: \"89d01f2e-05b5-4a11-b1f5-784b2a924c66\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4n9nm" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.488980 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/cace13a7-d60e-4a21-b606-e6cacde8ad36-etcd-service-ca\") pod \"etcd-operator-b45778765-kqsg8\" (UID: \"cace13a7-d60e-4a21-b606-e6cacde8ad36\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kqsg8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.489074 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3dc4301b-5dc4-4e39-a74b-9e46542e8dfb-metrics-certs\") pod \"router-default-5444994796-grvqx\" (UID: \"3dc4301b-5dc4-4e39-a74b-9e46542e8dfb\") " pod="openshift-ingress/router-default-5444994796-grvqx" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.489147 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/eb1caca0-0426-492d-b7bc-7a074b5e86ca-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-t55b4\" (UID: \"eb1caca0-0426-492d-b7bc-7a074b5e86ca\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t55b4" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.489189 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/d3839473-1e0c-4987-a025-7be16d2e6006-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-8wf52\" (UID: \"d3839473-1e0c-4987-a025-7be16d2e6006\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8wf52" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.489289 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-bound-sa-token\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.484910 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25c939a4-4ebb-4ce6-a99d-3e9108c444cd-config\") pod \"console-operator-58897d9998-s9544\" (UID: 
\"25c939a4-4ebb-4ce6-a99d-3e9108c444cd\") " pod="openshift-console-operator/console-operator-58897d9998-s9544" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.489548 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/7dcb2031-bbdf-4c68-9d63-694bd2907756-registration-dir\") pod \"csi-hostpathplugin-s7vxm\" (UID: \"7dcb2031-bbdf-4c68-9d63-694bd2907756\") " pod="hostpath-provisioner/csi-hostpathplugin-s7vxm" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.489671 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmk7p\" (UniqueName: \"kubernetes.io/projected/8479f1ce-9bbc-44ba-b73d-7c2cb1929f8b-kube-api-access-hmk7p\") pod \"catalog-operator-68c6474976-rgpfr\" (UID: \"8479f1ce-9bbc-44ba-b73d-7c2cb1929f8b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rgpfr" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.489703 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2lg8\" (UniqueName: \"kubernetes.io/projected/eb1caca0-0426-492d-b7bc-7a074b5e86ca-kube-api-access-v2lg8\") pod \"machine-config-controller-84d6567774-t55b4\" (UID: \"eb1caca0-0426-492d-b7bc-7a074b5e86ca\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t55b4" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.489768 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56f5v\" (UniqueName: \"kubernetes.io/projected/0a8e5c8f-a945-4071-9e4b-eaffcc05b4f7-kube-api-access-56f5v\") pod \"ingress-canary-8wskq\" (UID: \"0a8e5c8f-a945-4071-9e4b-eaffcc05b4f7\") " pod="openshift-ingress-canary/ingress-canary-8wskq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.489987 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vz5wc\" (UniqueName: \"kubernetes.io/projected/c152b47d-1462-4bec-9048-37ce680c0d19-kube-api-access-vz5wc\") pod \"control-plane-machine-set-operator-78cbb6b69f-5kwm8\" (UID: \"c152b47d-1462-4bec-9048-37ce680c0d19\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5kwm8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.490171 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/28dfb8e1-70f1-46b7-887a-c2ae4a892f60-trusted-ca\") pod \"ingress-operator-5b745b69d9-dtlw6\" (UID: \"28dfb8e1-70f1-46b7-887a-c2ae4a892f60\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtlw6" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.490229 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28bhm\" (UniqueName: \"kubernetes.io/projected/2d39e382-cc5b-4e9e-b66f-87fe37efbd4f-kube-api-access-28bhm\") pod \"collect-profiles-29490405-ljdkv\" (UID: \"2d39e382-cc5b-4e9e-b66f-87fe37efbd4f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490405-ljdkv" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.490398 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd207bc0-8b94-46b2-acb0-ac9db8d1e0aa-serving-cert\") pod \"service-ca-operator-777779d784-672x8\" (UID: \"cd207bc0-8b94-46b2-acb0-ac9db8d1e0aa\") " 
pod="openshift-service-ca-operator/service-ca-operator-777779d784-672x8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.490448 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4z7ql\" (UniqueName: \"kubernetes.io/projected/a3f59cd7-44a7-4d88-a8bb-7108b70efa58-kube-api-access-4z7ql\") pod \"marketplace-operator-79b997595-xsgg6\" (UID: \"a3f59cd7-44a7-4d88-a8bb-7108b70efa58\") " pod="openshift-marketplace/marketplace-operator-79b997595-xsgg6" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.490472 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/50c0df72-0f65-4ed5-a78b-89bfde4e3960-certs\") pod \"machine-config-server-66jfc\" (UID: \"50c0df72-0f65-4ed5-a78b-89bfde4e3960\") " pod="openshift-machine-config-operator/machine-config-server-66jfc" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.490756 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/4e429f86-b4b3-4de7-8556-ce5973ea48e0-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-hbddz\" (UID: \"4e429f86-b4b3-4de7-8556-ce5973ea48e0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hbddz" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.490809 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/3dc4301b-5dc4-4e39-a74b-9e46542e8dfb-default-certificate\") pod \"router-default-5444994796-grvqx\" (UID: \"3dc4301b-5dc4-4e39-a74b-9e46542e8dfb\") " pod="openshift-ingress/router-default-5444994796-grvqx" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.491392 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/28dfb8e1-70f1-46b7-887a-c2ae4a892f60-trusted-ca\") pod \"ingress-operator-5b745b69d9-dtlw6\" (UID: \"28dfb8e1-70f1-46b7-887a-c2ae4a892f60\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtlw6" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.491567 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/eb1caca0-0426-492d-b7bc-7a074b5e86ca-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-t55b4\" (UID: \"eb1caca0-0426-492d-b7bc-7a074b5e86ca\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t55b4" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.491636 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rsxwv\" (UniqueName: \"kubernetes.io/projected/d768c952-394c-4b6e-b4d9-7dbc838cefac-kube-api-access-rsxwv\") pod \"packageserver-d55dfcdfc-xphsr\" (UID: \"d768c952-394c-4b6e-b4d9-7dbc838cefac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xphsr" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.491682 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ld857\" (UniqueName: \"kubernetes.io/projected/d95fc008-94a6-40af-b7bf-a55d2920775c-kube-api-access-ld857\") pod \"olm-operator-6b444d44fb-djnsq\" (UID: \"d95fc008-94a6-40af-b7bf-a55d2920775c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-djnsq" Jan 26 10:45:25 
crc kubenswrapper[5003]: I0126 10:45:25.491714 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3dc4301b-5dc4-4e39-a74b-9e46542e8dfb-service-ca-bundle\") pod \"router-default-5444994796-grvqx\" (UID: \"3dc4301b-5dc4-4e39-a74b-9e46542e8dfb\") " pod="openshift-ingress/router-default-5444994796-grvqx" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.491826 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wzjq\" (UniqueName: \"kubernetes.io/projected/28dfb8e1-70f1-46b7-887a-c2ae4a892f60-kube-api-access-2wzjq\") pod \"ingress-operator-5b745b69d9-dtlw6\" (UID: \"28dfb8e1-70f1-46b7-887a-c2ae4a892f60\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtlw6" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.492049 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/d768c952-394c-4b6e-b4d9-7dbc838cefac-tmpfs\") pod \"packageserver-d55dfcdfc-xphsr\" (UID: \"d768c952-394c-4b6e-b4d9-7dbc838cefac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xphsr" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.492104 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4x8r\" (UniqueName: \"kubernetes.io/projected/4e429f86-b4b3-4de7-8556-ce5973ea48e0-kube-api-access-h4x8r\") pod \"cluster-image-registry-operator-dc59b4c8b-hbddz\" (UID: \"4e429f86-b4b3-4de7-8556-ce5973ea48e0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hbddz" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.492263 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-529ll\" (UniqueName: \"kubernetes.io/projected/7dcb2031-bbdf-4c68-9d63-694bd2907756-kube-api-access-529ll\") pod \"csi-hostpathplugin-s7vxm\" (UID: \"7dcb2031-bbdf-4c68-9d63-694bd2907756\") " pod="hostpath-provisioner/csi-hostpathplugin-s7vxm" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.492319 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pvkr\" (UniqueName: \"kubernetes.io/projected/d3839473-1e0c-4987-a025-7be16d2e6006-kube-api-access-9pvkr\") pod \"cluster-samples-operator-665b6dd947-8wf52\" (UID: \"d3839473-1e0c-4987-a025-7be16d2e6006\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8wf52" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.492350 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qctz4\" (UniqueName: \"kubernetes.io/projected/aab55b24-007d-4dfb-a8a2-624b813920f9-kube-api-access-qctz4\") pod \"migrator-59844c95c7-bhzbs\" (UID: \"aab55b24-007d-4dfb-a8a2-624b813920f9\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bhzbs" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.492366 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/50c0df72-0f65-4ed5-a78b-89bfde4e3960-node-bootstrap-token\") pod \"machine-config-server-66jfc\" (UID: \"50c0df72-0f65-4ed5-a78b-89bfde4e3960\") " pod="openshift-machine-config-operator/machine-config-server-66jfc" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.492398 5003 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3dc4301b-5dc4-4e39-a74b-9e46542e8dfb-service-ca-bundle\") pod \"router-default-5444994796-grvqx\" (UID: \"3dc4301b-5dc4-4e39-a74b-9e46542e8dfb\") " pod="openshift-ingress/router-default-5444994796-grvqx" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.492472 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9sc2\" (UniqueName: \"kubernetes.io/projected/94f616e2-cfa7-4b54-b6f8-4b07df5b714f-kube-api-access-c9sc2\") pod \"machine-config-operator-74547568cd-zdzvq\" (UID: \"94f616e2-cfa7-4b54-b6f8-4b07df5b714f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zdzvq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.492508 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/beba071b-8a4b-4aae-862c-793e659eaf30-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6sl62\" (UID: \"beba071b-8a4b-4aae-862c-793e659eaf30\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6sl62" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.492529 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/eb1caca0-0426-492d-b7bc-7a074b5e86ca-proxy-tls\") pod \"machine-config-controller-84d6567774-t55b4\" (UID: \"eb1caca0-0426-492d-b7bc-7a074b5e86ca\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t55b4" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.492541 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/c152b47d-1462-4bec-9048-37ce680c0d19-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-5kwm8\" (UID: \"c152b47d-1462-4bec-9048-37ce680c0d19\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5kwm8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.492723 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/3dc4301b-5dc4-4e39-a74b-9e46542e8dfb-stats-auth\") pod \"router-default-5444994796-grvqx\" (UID: \"3dc4301b-5dc4-4e39-a74b-9e46542e8dfb\") " pod="openshift-ingress/router-default-5444994796-grvqx" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.492915 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/beba071b-8a4b-4aae-862c-793e659eaf30-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6sl62\" (UID: \"beba071b-8a4b-4aae-862c-793e659eaf30\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6sl62" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.494862 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/28dfb8e1-70f1-46b7-887a-c2ae4a892f60-metrics-tls\") pod \"ingress-operator-5b745b69d9-dtlw6\" (UID: \"28dfb8e1-70f1-46b7-887a-c2ae4a892f60\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtlw6" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.497107 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" 
(UniqueName: \"kubernetes.io/secret/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-installation-pull-secrets\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.497932 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/89d01f2e-05b5-4a11-b1f5-784b2a924c66-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-4n9nm\" (UID: \"89d01f2e-05b5-4a11-b1f5-784b2a924c66\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4n9nm" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.500867 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/eb1caca0-0426-492d-b7bc-7a074b5e86ca-proxy-tls\") pod \"machine-config-controller-84d6567774-t55b4\" (UID: \"eb1caca0-0426-492d-b7bc-7a074b5e86ca\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t55b4" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.501764 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/3dc4301b-5dc4-4e39-a74b-9e46542e8dfb-default-certificate\") pod \"router-default-5444994796-grvqx\" (UID: \"3dc4301b-5dc4-4e39-a74b-9e46542e8dfb\") " pod="openshift-ingress/router-default-5444994796-grvqx" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.515654 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cace13a7-d60e-4a21-b606-e6cacde8ad36-serving-cert\") pod \"etcd-operator-b45778765-kqsg8\" (UID: \"cace13a7-d60e-4a21-b606-e6cacde8ad36\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kqsg8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.515808 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/3dc4301b-5dc4-4e39-a74b-9e46542e8dfb-stats-auth\") pod \"router-default-5444994796-grvqx\" (UID: \"3dc4301b-5dc4-4e39-a74b-9e46542e8dfb\") " pod="openshift-ingress/router-default-5444994796-grvqx" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.515849 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/4e429f86-b4b3-4de7-8556-ce5973ea48e0-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-hbddz\" (UID: \"4e429f86-b4b3-4de7-8556-ce5973ea48e0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hbddz" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.515984 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3dc4301b-5dc4-4e39-a74b-9e46542e8dfb-metrics-certs\") pod \"router-default-5444994796-grvqx\" (UID: \"3dc4301b-5dc4-4e39-a74b-9e46542e8dfb\") " pod="openshift-ingress/router-default-5444994796-grvqx" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.516504 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-425sk\" (UniqueName: \"kubernetes.io/projected/1739818d-5558-4901-9b9d-3d735f5f30e3-kube-api-access-425sk\") pod \"dns-operator-744455d44c-rctvj\" (UID: \"1739818d-5558-4901-9b9d-3d735f5f30e3\") " 
pod="openshift-dns-operator/dns-operator-744455d44c-rctvj" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.517078 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9wn56\" (UniqueName: \"kubernetes.io/projected/3dc4301b-5dc4-4e39-a74b-9e46542e8dfb-kube-api-access-9wn56\") pod \"router-default-5444994796-grvqx\" (UID: \"3dc4301b-5dc4-4e39-a74b-9e46542e8dfb\") " pod="openshift-ingress/router-default-5444994796-grvqx" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.517628 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7cbbr\" (UniqueName: \"kubernetes.io/projected/cace13a7-d60e-4a21-b606-e6cacde8ad36-kube-api-access-7cbbr\") pod \"etcd-operator-b45778765-kqsg8\" (UID: \"cace13a7-d60e-4a21-b606-e6cacde8ad36\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kqsg8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.521110 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/beba071b-8a4b-4aae-862c-793e659eaf30-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6sl62\" (UID: \"beba071b-8a4b-4aae-862c-793e659eaf30\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6sl62" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.521435 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2lg8\" (UniqueName: \"kubernetes.io/projected/eb1caca0-0426-492d-b7bc-7a074b5e86ca-kube-api-access-v2lg8\") pod \"machine-config-controller-84d6567774-t55b4\" (UID: \"eb1caca0-0426-492d-b7bc-7a074b5e86ca\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t55b4" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.521462 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1739818d-5558-4901-9b9d-3d735f5f30e3-metrics-tls\") pod \"dns-operator-744455d44c-rctvj\" (UID: \"1739818d-5558-4901-9b9d-3d735f5f30e3\") " pod="openshift-dns-operator/dns-operator-744455d44c-rctvj" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.523093 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vz5wc\" (UniqueName: \"kubernetes.io/projected/c152b47d-1462-4bec-9048-37ce680c0d19-kube-api-access-vz5wc\") pod \"control-plane-machine-set-operator-78cbb6b69f-5kwm8\" (UID: \"c152b47d-1462-4bec-9048-37ce680c0d19\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5kwm8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.523197 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2djc\" (UniqueName: \"kubernetes.io/projected/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-kube-api-access-f2djc\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.523213 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/64d00de5-3879-4536-84a1-1c305b47c321-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-sls4l\" (UID: \"64d00de5-3879-4536-84a1-1c305b47c321\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sls4l" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.523221 5003 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cblk6\" (UniqueName: \"kubernetes.io/projected/25c939a4-4ebb-4ce6-a99d-3e9108c444cd-kube-api-access-cblk6\") pod \"console-operator-58897d9998-s9544\" (UID: \"25c939a4-4ebb-4ce6-a99d-3e9108c444cd\") " pod="openshift-console-operator/console-operator-58897d9998-s9544" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.523345 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rrdfv\" (UniqueName: \"kubernetes.io/projected/89d01f2e-05b5-4a11-b1f5-784b2a924c66-kube-api-access-rrdfv\") pod \"package-server-manager-789f6589d5-4n9nm\" (UID: \"89d01f2e-05b5-4a11-b1f5-784b2a924c66\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4n9nm" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.525565 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-bound-sa-token\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.525682 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/28dfb8e1-70f1-46b7-887a-c2ae4a892f60-bound-sa-token\") pod \"ingress-operator-5b745b69d9-dtlw6\" (UID: \"28dfb8e1-70f1-46b7-887a-c2ae4a892f60\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtlw6" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.526108 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4e429f86-b4b3-4de7-8556-ce5973ea48e0-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-hbddz\" (UID: \"4e429f86-b4b3-4de7-8556-ce5973ea48e0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hbddz" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.527160 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4x8r\" (UniqueName: \"kubernetes.io/projected/4e429f86-b4b3-4de7-8556-ce5973ea48e0-kube-api-access-h4x8r\") pod \"cluster-image-registry-operator-dc59b4c8b-hbddz\" (UID: \"4e429f86-b4b3-4de7-8556-ce5973ea48e0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hbddz" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.527181 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wzjq\" (UniqueName: \"kubernetes.io/projected/28dfb8e1-70f1-46b7-887a-c2ae4a892f60-kube-api-access-2wzjq\") pod \"ingress-operator-5b745b69d9-dtlw6\" (UID: \"28dfb8e1-70f1-46b7-887a-c2ae4a892f60\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtlw6" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.528120 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/64d00de5-3879-4536-84a1-1c305b47c321-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-sls4l\" (UID: \"64d00de5-3879-4536-84a1-1c305b47c321\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sls4l" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.528754 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: 
\"kubernetes.io/configmap/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-registry-certificates\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.529100 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/d95fc008-94a6-40af-b7bf-a55d2920775c-srv-cert\") pod \"olm-operator-6b444d44fb-djnsq\" (UID: \"d95fc008-94a6-40af-b7bf-a55d2920775c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-djnsq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.529524 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d95fc008-94a6-40af-b7bf-a55d2920775c-profile-collector-cert\") pod \"olm-operator-6b444d44fb-djnsq\" (UID: \"d95fc008-94a6-40af-b7bf-a55d2920775c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-djnsq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.529572 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-trusted-ca\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.538195 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9pvkr\" (UniqueName: \"kubernetes.io/projected/d3839473-1e0c-4987-a025-7be16d2e6006-kube-api-access-9pvkr\") pod \"cluster-samples-operator-665b6dd947-8wf52\" (UID: \"d3839473-1e0c-4987-a025-7be16d2e6006\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8wf52" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.549874 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-grvqx" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.569230 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5kwm8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.569851 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ld857\" (UniqueName: \"kubernetes.io/projected/d95fc008-94a6-40af-b7bf-a55d2920775c-kube-api-access-ld857\") pod \"olm-operator-6b444d44fb-djnsq\" (UID: \"d95fc008-94a6-40af-b7bf-a55d2920775c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-djnsq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.581963 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4n9nm" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.595435 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.595648 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d768c952-394c-4b6e-b4d9-7dbc838cefac-webhook-cert\") pod \"packageserver-d55dfcdfc-xphsr\" (UID: \"d768c952-394c-4b6e-b4d9-7dbc838cefac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xphsr" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.595677 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2d39e382-cc5b-4e9e-b66f-87fe37efbd4f-secret-volume\") pod \"collect-profiles-29490405-ljdkv\" (UID: \"2d39e382-cc5b-4e9e-b66f-87fe37efbd4f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490405-ljdkv" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.595701 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/8479f1ce-9bbc-44ba-b73d-7c2cb1929f8b-profile-collector-cert\") pod \"catalog-operator-68c6474976-rgpfr\" (UID: \"8479f1ce-9bbc-44ba-b73d-7c2cb1929f8b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rgpfr" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.595730 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0a8e5c8f-a945-4071-9e4b-eaffcc05b4f7-cert\") pod \"ingress-canary-8wskq\" (UID: \"0a8e5c8f-a945-4071-9e4b-eaffcc05b4f7\") " pod="openshift-ingress-canary/ingress-canary-8wskq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.595762 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/7dcb2031-bbdf-4c68-9d63-694bd2907756-registration-dir\") pod \"csi-hostpathplugin-s7vxm\" (UID: \"7dcb2031-bbdf-4c68-9d63-694bd2907756\") " pod="hostpath-provisioner/csi-hostpathplugin-s7vxm" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.595784 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmk7p\" (UniqueName: \"kubernetes.io/projected/8479f1ce-9bbc-44ba-b73d-7c2cb1929f8b-kube-api-access-hmk7p\") pod \"catalog-operator-68c6474976-rgpfr\" (UID: \"8479f1ce-9bbc-44ba-b73d-7c2cb1929f8b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rgpfr" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.595805 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56f5v\" (UniqueName: \"kubernetes.io/projected/0a8e5c8f-a945-4071-9e4b-eaffcc05b4f7-kube-api-access-56f5v\") pod \"ingress-canary-8wskq\" (UID: \"0a8e5c8f-a945-4071-9e4b-eaffcc05b4f7\") " pod="openshift-ingress-canary/ingress-canary-8wskq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.595825 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28bhm\" 
(UniqueName: \"kubernetes.io/projected/2d39e382-cc5b-4e9e-b66f-87fe37efbd4f-kube-api-access-28bhm\") pod \"collect-profiles-29490405-ljdkv\" (UID: \"2d39e382-cc5b-4e9e-b66f-87fe37efbd4f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490405-ljdkv" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.595855 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4z7ql\" (UniqueName: \"kubernetes.io/projected/a3f59cd7-44a7-4d88-a8bb-7108b70efa58-kube-api-access-4z7ql\") pod \"marketplace-operator-79b997595-xsgg6\" (UID: \"a3f59cd7-44a7-4d88-a8bb-7108b70efa58\") " pod="openshift-marketplace/marketplace-operator-79b997595-xsgg6" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.595875 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/50c0df72-0f65-4ed5-a78b-89bfde4e3960-certs\") pod \"machine-config-server-66jfc\" (UID: \"50c0df72-0f65-4ed5-a78b-89bfde4e3960\") " pod="openshift-machine-config-operator/machine-config-server-66jfc" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.595895 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd207bc0-8b94-46b2-acb0-ac9db8d1e0aa-serving-cert\") pod \"service-ca-operator-777779d784-672x8\" (UID: \"cd207bc0-8b94-46b2-acb0-ac9db8d1e0aa\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-672x8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.595919 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rsxwv\" (UniqueName: \"kubernetes.io/projected/d768c952-394c-4b6e-b4d9-7dbc838cefac-kube-api-access-rsxwv\") pod \"packageserver-d55dfcdfc-xphsr\" (UID: \"d768c952-394c-4b6e-b4d9-7dbc838cefac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xphsr" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.595940 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/d768c952-394c-4b6e-b4d9-7dbc838cefac-tmpfs\") pod \"packageserver-d55dfcdfc-xphsr\" (UID: \"d768c952-394c-4b6e-b4d9-7dbc838cefac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xphsr" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.595963 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-529ll\" (UniqueName: \"kubernetes.io/projected/7dcb2031-bbdf-4c68-9d63-694bd2907756-kube-api-access-529ll\") pod \"csi-hostpathplugin-s7vxm\" (UID: \"7dcb2031-bbdf-4c68-9d63-694bd2907756\") " pod="hostpath-provisioner/csi-hostpathplugin-s7vxm" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.595981 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/50c0df72-0f65-4ed5-a78b-89bfde4e3960-node-bootstrap-token\") pod \"machine-config-server-66jfc\" (UID: \"50c0df72-0f65-4ed5-a78b-89bfde4e3960\") " pod="openshift-machine-config-operator/machine-config-server-66jfc" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.596002 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qctz4\" (UniqueName: \"kubernetes.io/projected/aab55b24-007d-4dfb-a8a2-624b813920f9-kube-api-access-qctz4\") pod \"migrator-59844c95c7-bhzbs\" (UID: \"aab55b24-007d-4dfb-a8a2-624b813920f9\") " 
pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bhzbs" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.596024 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9sc2\" (UniqueName: \"kubernetes.io/projected/94f616e2-cfa7-4b54-b6f8-4b07df5b714f-kube-api-access-c9sc2\") pod \"machine-config-operator-74547568cd-zdzvq\" (UID: \"94f616e2-cfa7-4b54-b6f8-4b07df5b714f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zdzvq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.596058 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd207bc0-8b94-46b2-acb0-ac9db8d1e0aa-config\") pod \"service-ca-operator-777779d784-672x8\" (UID: \"cd207bc0-8b94-46b2-acb0-ac9db8d1e0aa\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-672x8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.596077 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/7dcb2031-bbdf-4c68-9d63-694bd2907756-csi-data-dir\") pod \"csi-hostpathplugin-s7vxm\" (UID: \"7dcb2031-bbdf-4c68-9d63-694bd2907756\") " pod="hostpath-provisioner/csi-hostpathplugin-s7vxm" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.596103 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2100df7-b013-4d8a-8ab4-18e2506bdd02-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-54rgc\" (UID: \"d2100df7-b013-4d8a-8ab4-18e2506bdd02\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-54rgc" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.596127 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a3f59cd7-44a7-4d88-a8bb-7108b70efa58-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-xsgg6\" (UID: \"a3f59cd7-44a7-4d88-a8bb-7108b70efa58\") " pod="openshift-marketplace/marketplace-operator-79b997595-xsgg6" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.596151 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9clhp\" (UniqueName: \"kubernetes.io/projected/cd207bc0-8b94-46b2-acb0-ac9db8d1e0aa-kube-api-access-9clhp\") pod \"service-ca-operator-777779d784-672x8\" (UID: \"cd207bc0-8b94-46b2-acb0-ac9db8d1e0aa\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-672x8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.596176 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sx7nz\" (UniqueName: \"kubernetes.io/projected/0d96d414-365e-41ec-bbd0-02e8d36271be-kube-api-access-sx7nz\") pod \"service-ca-9c57cc56f-tlchc\" (UID: \"0d96d414-365e-41ec-bbd0-02e8d36271be\") " pod="openshift-service-ca/service-ca-9c57cc56f-tlchc" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.596206 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/0d96d414-365e-41ec-bbd0-02e8d36271be-signing-key\") pod \"service-ca-9c57cc56f-tlchc\" (UID: \"0d96d414-365e-41ec-bbd0-02e8d36271be\") " pod="openshift-service-ca/service-ca-9c57cc56f-tlchc" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.596229 5003 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2d39e382-cc5b-4e9e-b66f-87fe37efbd4f-config-volume\") pod \"collect-profiles-29490405-ljdkv\" (UID: \"2d39e382-cc5b-4e9e-b66f-87fe37efbd4f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490405-ljdkv" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.596252 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/7dcb2031-bbdf-4c68-9d63-694bd2907756-plugins-dir\") pod \"csi-hostpathplugin-s7vxm\" (UID: \"7dcb2031-bbdf-4c68-9d63-694bd2907756\") " pod="hostpath-provisioner/csi-hostpathplugin-s7vxm" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.596303 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4fx8k\" (UniqueName: \"kubernetes.io/projected/bf9d71da-10fb-416e-919d-291fd4267dc9-kube-api-access-4fx8k\") pod \"multus-admission-controller-857f4d67dd-7zdzf\" (UID: \"bf9d71da-10fb-416e-919d-291fd4267dc9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-7zdzf" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.596335 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ndfc\" (UniqueName: \"kubernetes.io/projected/d2100df7-b013-4d8a-8ab4-18e2506bdd02-kube-api-access-6ndfc\") pod \"openshift-controller-manager-operator-756b6f6bc6-54rgc\" (UID: \"d2100df7-b013-4d8a-8ab4-18e2506bdd02\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-54rgc" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.596356 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/94f616e2-cfa7-4b54-b6f8-4b07df5b714f-auth-proxy-config\") pod \"machine-config-operator-74547568cd-zdzvq\" (UID: \"94f616e2-cfa7-4b54-b6f8-4b07df5b714f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zdzvq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.596383 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/7dcb2031-bbdf-4c68-9d63-694bd2907756-socket-dir\") pod \"csi-hostpathplugin-s7vxm\" (UID: \"7dcb2031-bbdf-4c68-9d63-694bd2907756\") " pod="hostpath-provisioner/csi-hostpathplugin-s7vxm" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.596403 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ngzzk\" (UniqueName: \"kubernetes.io/projected/0118bd40-a7ee-4622-913b-7395962ac6b8-kube-api-access-ngzzk\") pod \"dns-default-28mrx\" (UID: \"0118bd40-a7ee-4622-913b-7395962ac6b8\") " pod="openshift-dns/dns-default-28mrx" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.596435 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d768c952-394c-4b6e-b4d9-7dbc838cefac-apiservice-cert\") pod \"packageserver-d55dfcdfc-xphsr\" (UID: \"d768c952-394c-4b6e-b4d9-7dbc838cefac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xphsr" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.596460 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/7dcb2031-bbdf-4c68-9d63-694bd2907756-mountpoint-dir\") pod 
\"csi-hostpathplugin-s7vxm\" (UID: \"7dcb2031-bbdf-4c68-9d63-694bd2907756\") " pod="hostpath-provisioner/csi-hostpathplugin-s7vxm" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.596481 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/bf9d71da-10fb-416e-919d-291fd4267dc9-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-7zdzf\" (UID: \"bf9d71da-10fb-416e-919d-291fd4267dc9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-7zdzf" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.596504 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/94f616e2-cfa7-4b54-b6f8-4b07df5b714f-images\") pod \"machine-config-operator-74547568cd-zdzvq\" (UID: \"94f616e2-cfa7-4b54-b6f8-4b07df5b714f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zdzvq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.596528 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/94f616e2-cfa7-4b54-b6f8-4b07df5b714f-proxy-tls\") pod \"machine-config-operator-74547568cd-zdzvq\" (UID: \"94f616e2-cfa7-4b54-b6f8-4b07df5b714f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zdzvq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.596551 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d62nh\" (UniqueName: \"kubernetes.io/projected/ab780a4c-5932-40c1-9383-f3d42238d2ac-kube-api-access-d62nh\") pod \"downloads-7954f5f757-tn4sr\" (UID: \"ab780a4c-5932-40c1-9383-f3d42238d2ac\") " pod="openshift-console/downloads-7954f5f757-tn4sr" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.596571 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/0118bd40-a7ee-4622-913b-7395962ac6b8-metrics-tls\") pod \"dns-default-28mrx\" (UID: \"0118bd40-a7ee-4622-913b-7395962ac6b8\") " pod="openshift-dns/dns-default-28mrx" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.596589 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/8479f1ce-9bbc-44ba-b73d-7c2cb1929f8b-srv-cert\") pod \"catalog-operator-68c6474976-rgpfr\" (UID: \"8479f1ce-9bbc-44ba-b73d-7c2cb1929f8b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rgpfr" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.596617 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2100df7-b013-4d8a-8ab4-18e2506bdd02-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-54rgc\" (UID: \"d2100df7-b013-4d8a-8ab4-18e2506bdd02\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-54rgc" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.596640 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a3f59cd7-44a7-4d88-a8bb-7108b70efa58-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-xsgg6\" (UID: \"a3f59cd7-44a7-4d88-a8bb-7108b70efa58\") " pod="openshift-marketplace/marketplace-operator-79b997595-xsgg6" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.596660 5003 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zrlf\" (UniqueName: \"kubernetes.io/projected/50c0df72-0f65-4ed5-a78b-89bfde4e3960-kube-api-access-6zrlf\") pod \"machine-config-server-66jfc\" (UID: \"50c0df72-0f65-4ed5-a78b-89bfde4e3960\") " pod="openshift-machine-config-operator/machine-config-server-66jfc" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.596694 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/0d96d414-365e-41ec-bbd0-02e8d36271be-signing-cabundle\") pod \"service-ca-9c57cc56f-tlchc\" (UID: \"0d96d414-365e-41ec-bbd0-02e8d36271be\") " pod="openshift-service-ca/service-ca-9c57cc56f-tlchc" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.596715 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0118bd40-a7ee-4622-913b-7395962ac6b8-config-volume\") pod \"dns-default-28mrx\" (UID: \"0118bd40-a7ee-4622-913b-7395962ac6b8\") " pod="openshift-dns/dns-default-28mrx" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.597437 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0118bd40-a7ee-4622-913b-7395962ac6b8-config-volume\") pod \"dns-default-28mrx\" (UID: \"0118bd40-a7ee-4622-913b-7395962ac6b8\") " pod="openshift-dns/dns-default-28mrx" Jan 26 10:45:25 crc kubenswrapper[5003]: E0126 10:45:25.597530 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:26.097514342 +0000 UTC m=+141.638739903 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.602562 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/7dcb2031-bbdf-4c68-9d63-694bd2907756-mountpoint-dir\") pod \"csi-hostpathplugin-s7vxm\" (UID: \"7dcb2031-bbdf-4c68-9d63-694bd2907756\") " pod="hostpath-provisioner/csi-hostpathplugin-s7vxm" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.609990 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/d768c952-394c-4b6e-b4d9-7dbc838cefac-tmpfs\") pod \"packageserver-d55dfcdfc-xphsr\" (UID: \"d768c952-394c-4b6e-b4d9-7dbc838cefac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xphsr" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.611994 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d768c952-394c-4b6e-b4d9-7dbc838cefac-webhook-cert\") pod \"packageserver-d55dfcdfc-xphsr\" (UID: \"d768c952-394c-4b6e-b4d9-7dbc838cefac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xphsr" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.613676 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/7dcb2031-bbdf-4c68-9d63-694bd2907756-csi-data-dir\") pod \"csi-hostpathplugin-s7vxm\" (UID: \"7dcb2031-bbdf-4c68-9d63-694bd2907756\") " pod="hostpath-provisioner/csi-hostpathplugin-s7vxm" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.615076 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/50c0df72-0f65-4ed5-a78b-89bfde4e3960-node-bootstrap-token\") pod \"machine-config-server-66jfc\" (UID: \"50c0df72-0f65-4ed5-a78b-89bfde4e3960\") " pod="openshift-machine-config-operator/machine-config-server-66jfc" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.619250 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2d39e382-cc5b-4e9e-b66f-87fe37efbd4f-config-volume\") pod \"collect-profiles-29490405-ljdkv\" (UID: \"2d39e382-cc5b-4e9e-b66f-87fe37efbd4f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490405-ljdkv" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.619557 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/7dcb2031-bbdf-4c68-9d63-694bd2907756-plugins-dir\") pod \"csi-hostpathplugin-s7vxm\" (UID: \"7dcb2031-bbdf-4c68-9d63-694bd2907756\") " pod="hostpath-provisioner/csi-hostpathplugin-s7vxm" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.623439 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9clhp\" (UniqueName: \"kubernetes.io/projected/cd207bc0-8b94-46b2-acb0-ac9db8d1e0aa-kube-api-access-9clhp\") pod \"service-ca-operator-777779d784-672x8\" (UID: 
\"cd207bc0-8b94-46b2-acb0-ac9db8d1e0aa\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-672x8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.624954 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/94f616e2-cfa7-4b54-b6f8-4b07df5b714f-images\") pod \"machine-config-operator-74547568cd-zdzvq\" (UID: \"94f616e2-cfa7-4b54-b6f8-4b07df5b714f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zdzvq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.625747 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rsxwv\" (UniqueName: \"kubernetes.io/projected/d768c952-394c-4b6e-b4d9-7dbc838cefac-kube-api-access-rsxwv\") pod \"packageserver-d55dfcdfc-xphsr\" (UID: \"d768c952-394c-4b6e-b4d9-7dbc838cefac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xphsr" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.625995 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2100df7-b013-4d8a-8ab4-18e2506bdd02-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-54rgc\" (UID: \"d2100df7-b013-4d8a-8ab4-18e2506bdd02\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-54rgc" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.627191 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a3f59cd7-44a7-4d88-a8bb-7108b70efa58-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-xsgg6\" (UID: \"a3f59cd7-44a7-4d88-a8bb-7108b70efa58\") " pod="openshift-marketplace/marketplace-operator-79b997595-xsgg6" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.627740 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2d39e382-cc5b-4e9e-b66f-87fe37efbd4f-secret-volume\") pod \"collect-profiles-29490405-ljdkv\" (UID: \"2d39e382-cc5b-4e9e-b66f-87fe37efbd4f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490405-ljdkv" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.628089 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/bf9d71da-10fb-416e-919d-291fd4267dc9-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-7zdzf\" (UID: \"bf9d71da-10fb-416e-919d-291fd4267dc9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-7zdzf" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.628257 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/94f616e2-cfa7-4b54-b6f8-4b07df5b714f-auth-proxy-config\") pod \"machine-config-operator-74547568cd-zdzvq\" (UID: \"94f616e2-cfa7-4b54-b6f8-4b07df5b714f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zdzvq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.631120 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/7dcb2031-bbdf-4c68-9d63-694bd2907756-socket-dir\") pod \"csi-hostpathplugin-s7vxm\" (UID: \"7dcb2031-bbdf-4c68-9d63-694bd2907756\") " pod="hostpath-provisioner/csi-hostpathplugin-s7vxm" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.637772 5003 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-c9sc2\" (UniqueName: \"kubernetes.io/projected/94f616e2-cfa7-4b54-b6f8-4b07df5b714f-kube-api-access-c9sc2\") pod \"machine-config-operator-74547568cd-zdzvq\" (UID: \"94f616e2-cfa7-4b54-b6f8-4b07df5b714f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zdzvq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.641899 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/0118bd40-a7ee-4622-913b-7395962ac6b8-metrics-tls\") pod \"dns-default-28mrx\" (UID: \"0118bd40-a7ee-4622-913b-7395962ac6b8\") " pod="openshift-dns/dns-default-28mrx" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.648136 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ndfc\" (UniqueName: \"kubernetes.io/projected/d2100df7-b013-4d8a-8ab4-18e2506bdd02-kube-api-access-6ndfc\") pod \"openshift-controller-manager-operator-756b6f6bc6-54rgc\" (UID: \"d2100df7-b013-4d8a-8ab4-18e2506bdd02\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-54rgc" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.648719 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/0d96d414-365e-41ec-bbd0-02e8d36271be-signing-cabundle\") pod \"service-ca-9c57cc56f-tlchc\" (UID: \"0d96d414-365e-41ec-bbd0-02e8d36271be\") " pod="openshift-service-ca/service-ca-9c57cc56f-tlchc" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.648958 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2100df7-b013-4d8a-8ab4-18e2506bdd02-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-54rgc\" (UID: \"d2100df7-b013-4d8a-8ab4-18e2506bdd02\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-54rgc" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.649076 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/7dcb2031-bbdf-4c68-9d63-694bd2907756-registration-dir\") pod \"csi-hostpathplugin-s7vxm\" (UID: \"7dcb2031-bbdf-4c68-9d63-694bd2907756\") " pod="hostpath-provisioner/csi-hostpathplugin-s7vxm" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.649255 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd207bc0-8b94-46b2-acb0-ac9db8d1e0aa-config\") pod \"service-ca-operator-777779d784-672x8\" (UID: \"cd207bc0-8b94-46b2-acb0-ac9db8d1e0aa\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-672x8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.649624 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a3f59cd7-44a7-4d88-a8bb-7108b70efa58-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-xsgg6\" (UID: \"a3f59cd7-44a7-4d88-a8bb-7108b70efa58\") " pod="openshift-marketplace/marketplace-operator-79b997595-xsgg6" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.650114 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4fx8k\" (UniqueName: \"kubernetes.io/projected/bf9d71da-10fb-416e-919d-291fd4267dc9-kube-api-access-4fx8k\") pod 
\"multus-admission-controller-857f4d67dd-7zdzf\" (UID: \"bf9d71da-10fb-416e-919d-291fd4267dc9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-7zdzf" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.655328 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-7zdzf" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.655766 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d768c952-394c-4b6e-b4d9-7dbc838cefac-apiservice-cert\") pod \"packageserver-d55dfcdfc-xphsr\" (UID: \"d768c952-394c-4b6e-b4d9-7dbc838cefac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xphsr" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.657850 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/8479f1ce-9bbc-44ba-b73d-7c2cb1929f8b-profile-collector-cert\") pod \"catalog-operator-68c6474976-rgpfr\" (UID: \"8479f1ce-9bbc-44ba-b73d-7c2cb1929f8b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rgpfr" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.661381 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/8479f1ce-9bbc-44ba-b73d-7c2cb1929f8b-srv-cert\") pod \"catalog-operator-68c6474976-rgpfr\" (UID: \"8479f1ce-9bbc-44ba-b73d-7c2cb1929f8b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rgpfr" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.664958 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/0d96d414-365e-41ec-bbd0-02e8d36271be-signing-key\") pod \"service-ca-9c57cc56f-tlchc\" (UID: \"0d96d414-365e-41ec-bbd0-02e8d36271be\") " pod="openshift-service-ca/service-ca-9c57cc56f-tlchc" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.671313 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/50c0df72-0f65-4ed5-a78b-89bfde4e3960-certs\") pod \"machine-config-server-66jfc\" (UID: \"50c0df72-0f65-4ed5-a78b-89bfde4e3960\") " pod="openshift-machine-config-operator/machine-config-server-66jfc" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.671673 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/94f616e2-cfa7-4b54-b6f8-4b07df5b714f-proxy-tls\") pod \"machine-config-operator-74547568cd-zdzvq\" (UID: \"94f616e2-cfa7-4b54-b6f8-4b07df5b714f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zdzvq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.672033 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmk7p\" (UniqueName: \"kubernetes.io/projected/8479f1ce-9bbc-44ba-b73d-7c2cb1929f8b-kube-api-access-hmk7p\") pod \"catalog-operator-68c6474976-rgpfr\" (UID: \"8479f1ce-9bbc-44ba-b73d-7c2cb1929f8b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rgpfr" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.675498 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4z7ql\" (UniqueName: \"kubernetes.io/projected/a3f59cd7-44a7-4d88-a8bb-7108b70efa58-kube-api-access-4z7ql\") pod \"marketplace-operator-79b997595-xsgg6\" (UID: 
\"a3f59cd7-44a7-4d88-a8bb-7108b70efa58\") " pod="openshift-marketplace/marketplace-operator-79b997595-xsgg6" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.675783 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qctz4\" (UniqueName: \"kubernetes.io/projected/aab55b24-007d-4dfb-a8a2-624b813920f9-kube-api-access-qctz4\") pod \"migrator-59844c95c7-bhzbs\" (UID: \"aab55b24-007d-4dfb-a8a2-624b813920f9\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bhzbs" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.676175 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zrlf\" (UniqueName: \"kubernetes.io/projected/50c0df72-0f65-4ed5-a78b-89bfde4e3960-kube-api-access-6zrlf\") pod \"machine-config-server-66jfc\" (UID: \"50c0df72-0f65-4ed5-a78b-89bfde4e3960\") " pod="openshift-machine-config-operator/machine-config-server-66jfc" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.676297 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-529ll\" (UniqueName: \"kubernetes.io/projected/7dcb2031-bbdf-4c68-9d63-694bd2907756-kube-api-access-529ll\") pod \"csi-hostpathplugin-s7vxm\" (UID: \"7dcb2031-bbdf-4c68-9d63-694bd2907756\") " pod="hostpath-provisioner/csi-hostpathplugin-s7vxm" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.678610 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ngzzk\" (UniqueName: \"kubernetes.io/projected/0118bd40-a7ee-4622-913b-7395962ac6b8-kube-api-access-ngzzk\") pod \"dns-default-28mrx\" (UID: \"0118bd40-a7ee-4622-913b-7395962ac6b8\") " pod="openshift-dns/dns-default-28mrx" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.681276 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd207bc0-8b94-46b2-acb0-ac9db8d1e0aa-serving-cert\") pod \"service-ca-operator-777779d784-672x8\" (UID: \"cd207bc0-8b94-46b2-acb0-ac9db8d1e0aa\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-672x8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.682778 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sx7nz\" (UniqueName: \"kubernetes.io/projected/0d96d414-365e-41ec-bbd0-02e8d36271be-kube-api-access-sx7nz\") pod \"service-ca-9c57cc56f-tlchc\" (UID: \"0d96d414-365e-41ec-bbd0-02e8d36271be\") " pod="openshift-service-ca/service-ca-9c57cc56f-tlchc" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.684548 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d62nh\" (UniqueName: \"kubernetes.io/projected/ab780a4c-5932-40c1-9383-f3d42238d2ac-kube-api-access-d62nh\") pod \"downloads-7954f5f757-tn4sr\" (UID: \"ab780a4c-5932-40c1-9383-f3d42238d2ac\") " pod="openshift-console/downloads-7954f5f757-tn4sr" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.691083 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-28mrx" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.697696 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-66jfc" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.698431 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:25 crc kubenswrapper[5003]: E0126 10:45:25.698776 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:26.19876133 +0000 UTC m=+141.739986891 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.757377 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6sl62" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.766817 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-rctvj" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.774087 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-kqsg8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.779345 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-v5nfq"] Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.782780 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-s9544" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.784258 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56f5v\" (UniqueName: \"kubernetes.io/projected/0a8e5c8f-a945-4071-9e4b-eaffcc05b4f7-kube-api-access-56f5v\") pod \"ingress-canary-8wskq\" (UID: \"0a8e5c8f-a945-4071-9e4b-eaffcc05b4f7\") " pod="openshift-ingress-canary/ingress-canary-8wskq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.784740 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0a8e5c8f-a945-4071-9e4b-eaffcc05b4f7-cert\") pod \"ingress-canary-8wskq\" (UID: \"0a8e5c8f-a945-4071-9e4b-eaffcc05b4f7\") " pod="openshift-ingress-canary/ingress-canary-8wskq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.796156 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28bhm\" (UniqueName: \"kubernetes.io/projected/2d39e382-cc5b-4e9e-b66f-87fe37efbd4f-kube-api-access-28bhm\") pod \"collect-profiles-29490405-ljdkv\" (UID: \"2d39e382-cc5b-4e9e-b66f-87fe37efbd4f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490405-ljdkv" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.796446 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hbddz" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.799560 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:25 crc kubenswrapper[5003]: E0126 10:45:25.799921 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:26.299895574 +0000 UTC m=+141.841121135 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.803394 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t55b4" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.810647 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtlw6" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.817416 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-9d9xc"] Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.817781 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sls4l" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.827066 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8wf52" Jan 26 10:45:25 crc kubenswrapper[5003]: W0126 10:45:25.834053 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb43bbdc1_9062_4460_85e7_4de472e0fd06.slice/crio-f44d04fb43debbc7255b60a78676c5e49ef08f5e0fb7efb24417feb82b42aa0e WatchSource:0}: Error finding container f44d04fb43debbc7255b60a78676c5e49ef08f5e0fb7efb24417feb82b42aa0e: Status 404 returned error can't find the container with id f44d04fb43debbc7255b60a78676c5e49ef08f5e0fb7efb24417feb82b42aa0e Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.852826 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-djnsq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.880189 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-xsgg6" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.887534 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-54rgc" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.894018 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zdzvq" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.900465 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xphsr" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.900821 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:25 crc kubenswrapper[5003]: E0126 10:45:25.901237 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:26.401220194 +0000 UTC m=+141.942445755 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.907111 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rgpfr" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.914746 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-tlchc" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.922127 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-672x8" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.934961 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29490405-ljdkv" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.951752 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-tn4sr" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.952157 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6w5n8"] Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.957674 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bhzbs" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.960090 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5kwm8"] Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.976693 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-s7vxm" Jan 26 10:45:25 crc kubenswrapper[5003]: I0126 10:45:25.984487 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-8wskq" Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.001634 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:26 crc kubenswrapper[5003]: E0126 10:45:26.001786 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:26.501763201 +0000 UTC m=+142.042988762 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.001887 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:26 crc kubenswrapper[5003]: E0126 10:45:26.002222 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:26.502207854 +0000 UTC m=+142.043433415 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.030014 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-p8x6r"] Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.040750 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-jjlwz"] Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.050706 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99"] Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.103991 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:26 crc kubenswrapper[5003]: E0126 10:45:26.104532 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:26.604513882 +0000 UTC m=+142.145739443 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.107175 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-7zdzf"] Jan 26 10:45:26 crc kubenswrapper[5003]: W0126 10:45:26.109958 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod35326ae8_7aba_468e_abf7_aab37519fc34.slice/crio-33ce8e85065788d756452e36ad871521a4c03fcb3cb40e0f3af422d880169559 WatchSource:0}: Error finding container 33ce8e85065788d756452e36ad871521a4c03fcb3cb40e0f3af422d880169559: Status 404 returned error can't find the container with id 33ce8e85065788d756452e36ad871521a4c03fcb3cb40e0f3af422d880169559 Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.132994 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8wf52"] Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.153444 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4n9nm"] Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.220035 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:26 crc kubenswrapper[5003]: E0126 10:45:26.225565 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:26.725545034 +0000 UTC m=+142.266770585 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.286819 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sls4l"] Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.300354 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-sl8wp" event={"ID":"6e70d335-0c77-41ee-a1a5-f0d4b7d28bea","Type":"ContainerStarted","Data":"feeb13f786eb4e33ecb6e3ff5f9ff321f7450497ae2dd96be891e4668942f1e4"} Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.301434 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6w5n8" event={"ID":"96cf2336-9c93-48fe-8d61-c9618714c1b2","Type":"ContainerStarted","Data":"2755e6155bc15e72a54981e900c57edf9c870bdb8e94b9c73c5666ce98d746bc"} Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.305494 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2rcp" event={"ID":"518875f1-cfde-4528-b15f-369cd105dc65","Type":"ContainerStarted","Data":"52fd4f3a4d5451760a5ed9c4f7c528548cefa2a2462b00d246df562dca166df8"} Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.306993 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5kwm8" event={"ID":"c152b47d-1462-4bec-9048-37ce680c0d19","Type":"ContainerStarted","Data":"554e9597b62ee9b31cb072858199156f8b9b1f023253ce77b36536e699006e81"} Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.308082 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-66jfc" event={"ID":"50c0df72-0f65-4ed5-a78b-89bfde4e3960","Type":"ContainerStarted","Data":"6606c9fda3a74a16d0f218bcc4fdcbb763aa135a642018f7fa2fc200a6899b1a"} Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.309072 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-p8x6r" event={"ID":"82664343-0cff-4d22-8287-786cc0ce857c","Type":"ContainerStarted","Data":"5c313cc7535f97c67b2e1a438ba648a2654fa0af0aa29540e7bc0a20455d353b"} Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.309813 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-grvqx" event={"ID":"3dc4301b-5dc4-4e39-a74b-9e46542e8dfb","Type":"ContainerStarted","Data":"ea166b3bd6291696ba462ebeaacd826238a0f9eebd8e6575f43ff8ead5387091"} Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.310950 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-cpxlv" event={"ID":"a8192c61-0b99-47a2-af6c-aee9eff089f1","Type":"ContainerStarted","Data":"274e3678c951ea89cce51411626167d6719ec3ccd85e0948438a77c8b0e2298d"} Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.311884 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-config-operator/openshift-config-operator-7777fb866f-9d9xc" event={"ID":"b43bbdc1-9062-4460-85e7-4de472e0fd06","Type":"ContainerStarted","Data":"f44d04fb43debbc7255b60a78676c5e49ef08f5e0fb7efb24417feb82b42aa0e"} Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.312944 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-v5nfq" event={"ID":"6b8433bf-b4a0-4f65-9691-5da935026105","Type":"ContainerStarted","Data":"c0cf1b02a77cbcdcedf6e695ae683f3142c8e601bf9209be8dbd022993b96e03"} Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.313570 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" event={"ID":"16dd94f1-87e0-4fbd-910e-af2ece0fd525","Type":"ContainerStarted","Data":"f6221d2ce337f0c1ea9b7a956f6ffe167c6b657be858c0b82c2ad7fd04211e7f"} Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.315131 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vdrzc" event={"ID":"ba581404-064d-46b7-a930-ab65f4d61a80","Type":"ContainerStarted","Data":"28a8ea155ab0d7c2fb90ea50aca0e79c3d2d0e75a34b4595eae5f11f3a0b0667"} Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.319433 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" event={"ID":"7b2d0b83-b35f-4128-af91-623a6871a431","Type":"ContainerStarted","Data":"024a3fd4ad33291bd953c7094cd47e23de15d599ebadd9335b88263dd9666157"} Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.321229 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" event={"ID":"35326ae8-7aba-468e-abf7-aab37519fc34","Type":"ContainerStarted","Data":"33ce8e85065788d756452e36ad871521a4c03fcb3cb40e0f3af422d880169559"} Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.321647 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:26 crc kubenswrapper[5003]: E0126 10:45:26.321855 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:26.821830631 +0000 UTC m=+142.363056192 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.322022 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:26 crc kubenswrapper[5003]: E0126 10:45:26.322523 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:26.82250828 +0000 UTC m=+142.363733841 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.327786 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-7j782" event={"ID":"f149d971-e11c-471d-91a2-a8e5ed472e41","Type":"ContainerStarted","Data":"71e8a944b5475340bc184fd913010443881e2d6d15be8191e531d377fb7f55e8"} Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.333625 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cn5sg" event={"ID":"89d62a5e-a294-4eff-b004-1f62339b8f6e","Type":"ContainerStarted","Data":"db932dac557ad49e2342f6a5a93e733cd0487b1cf81cc0a179cb8f9d94a46bf8"} Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.335982 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-7zdzf" event={"ID":"bf9d71da-10fb-416e-919d-291fd4267dc9","Type":"ContainerStarted","Data":"2e47b0bc688ba0dfa8759dd1b2b4f208f330ac21c8035c99681008f374190dee"} Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.424400 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:26 crc kubenswrapper[5003]: E0126 10:45:26.424754 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
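[Annotation] The UniqueName quoted throughout is not opaque: for CSI volumes the kubelet builds it from the CSI plugin prefix, the driver name, and the CSI volume handle, joined with "^". Note also that the same unique name appears in two interleaved operations here, an unmount for the old pod (UID 8f668bae-...) and a mount for the replacement image-registry pod, both stuck on the same missing driver. A sketch reproducing the exact string seen above:

```go
// Reconstruct the CSI UniqueName format used in these entries.
package main

import "fmt"

// uniqueVolumeName joins the CSI plugin prefix, driver name, and
// volume handle the way the kubelet's CSI plugin does.
func uniqueVolumeName(driver, volumeHandle string) string {
	return fmt.Sprintf("kubernetes.io/csi/%s^%s", driver, volumeHandle)
}

func main() {
	fmt.Println(uniqueVolumeName(
		"kubevirt.io.hostpath-provisioner",
		"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8",
	))
	// Output matches the log:
	// kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8
}
```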
No retries permitted until 2026-01-26 10:45:26.924738085 +0000 UTC m=+142.465963646 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.424995 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:26 crc kubenswrapper[5003]: E0126 10:45:26.425331 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:26.925321582 +0000 UTC m=+142.466547203 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.532184 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:26 crc kubenswrapper[5003]: E0126 10:45:26.532644 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:27.032615292 +0000 UTC m=+142.573840853 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.633194 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:26 crc kubenswrapper[5003]: E0126 10:45:26.633533 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:27.13351807 +0000 UTC m=+142.674743631 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.734137 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:26 crc kubenswrapper[5003]: E0126 10:45:26.734274 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:27.234243333 +0000 UTC m=+142.775468894 (durationBeforeRetry 500ms). 
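[Annotation] Each timestamp pair like "2026-01-26 10:45:27.234243333 +0000 UTC m=+142.775468894" is plain Go time formatting: the m=+ suffix is the monotonic clock reading, in seconds since the process started, so these entries sit roughly 142 seconds after kubelet startup. Any time.Time obtained from time.Now() carries one:

```go
// Demonstrate the "m=+..." monotonic suffix seen in these log lines.
package main

import (
	"fmt"
	"time"
)

func main() {
	start := time.Now()
	time.Sleep(250 * time.Millisecond)
	t := time.Now()
	fmt.Println(t)            // prints "... m=+0.25..." (seconds since process start)
	fmt.Println(t.Sub(start)) // monotonic-safe elapsed time, ~250ms
}
```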
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.734507 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:26 crc kubenswrapper[5003]: E0126 10:45:26.734826 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:27.234816979 +0000 UTC m=+142.776042550 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.835436 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:26 crc kubenswrapper[5003]: E0126 10:45:26.835930 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:27.335911563 +0000 UTC m=+142.877137144 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:26 crc kubenswrapper[5003]: I0126 10:45:26.936637 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:26 crc kubenswrapper[5003]: E0126 10:45:26.937253 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:27.437229103 +0000 UTC m=+142.978454704 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.038361 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:27 crc kubenswrapper[5003]: E0126 10:45:27.038813 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:27.538792819 +0000 UTC m=+143.080018380 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.140013 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:27 crc kubenswrapper[5003]: E0126 10:45:27.140830 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:27.640816019 +0000 UTC m=+143.182041580 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.210191 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6sl62"] Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.235149 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-s9544"] Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.245501 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:27 crc kubenswrapper[5003]: E0126 10:45:27.246472 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:27.745891456 +0000 UTC m=+143.287117017 (durationBeforeRetry 500ms). 
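[Annotation] Every failure above arms a "no retries permitted until" deadline exactly durationBeforeRetry past the failure, a constant 500ms in these entries; the volume reconciler keeps re-queuing the operation, and the pending-operations table refuses it until the deadline passes, which is why the same pair of errors recurs at ~100ms reconcile intervals but only actually retries twice a second. A sketch of that gate, not the kubelet's actual nestedpendingoperations code:

```go
// Sketch of the retry gate these entries describe: a failed operation
// may not run again until lastFailure + durationBeforeRetry.
package main

import (
	"fmt"
	"time"
)

type pendingOp struct {
	lastFailure         time.Time
	durationBeforeRetry time.Duration
}

func (p *pendingOp) mayRetry(now time.Time) bool {
	return now.After(p.lastFailure.Add(p.durationBeforeRetry))
}

func main() {
	op := pendingOp{lastFailure: time.Now(), durationBeforeRetry: 500 * time.Millisecond}
	for i := 0; i < 3; i++ {
		// The reconciler polls more often than the gate allows retries.
		fmt.Printf("t+%dms: mayRetry=%v\n", i*300, op.mayRetry(time.Now()))
		time.Sleep(300 * time.Millisecond)
	}
}
```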
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.266622 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-kqsg8"] Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.272564 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-28mrx"] Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.274803 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-dtlw6"] Jan 26 10:45:27 crc kubenswrapper[5003]: W0126 10:45:27.291109 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod28dfb8e1_70f1_46b7_887a_c2ae4a892f60.slice/crio-d15cd1a406e6e91a8433d0aa142f97d54e19a2446ef7d00005ec0034c6364314 WatchSource:0}: Error finding container d15cd1a406e6e91a8433d0aa142f97d54e19a2446ef7d00005ec0034c6364314: Status 404 returned error can't find the container with id d15cd1a406e6e91a8433d0aa142f97d54e19a2446ef7d00005ec0034c6364314 Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.291547 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-t55b4"] Jan 26 10:45:27 crc kubenswrapper[5003]: W0126 10:45:27.335698 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeb1caca0_0426_492d_b7bc_7a074b5e86ca.slice/crio-2b147e174a76aecd1435b314506769df4e4bfff95ddf1aab80403c3d008c9595 WatchSource:0}: Error finding container 2b147e174a76aecd1435b314506769df4e4bfff95ddf1aab80403c3d008c9595: Status 404 returned error can't find the container with id 2b147e174a76aecd1435b314506769df4e4bfff95ddf1aab80403c3d008c9595 Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.347858 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-66jfc" event={"ID":"50c0df72-0f65-4ed5-a78b-89bfde4e3960","Type":"ContainerStarted","Data":"917cdd155c2e99349feff9ec808755d62c9203722e4fce3aa88b264071c422ac"} Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.351734 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:27 crc kubenswrapper[5003]: E0126 10:45:27.352979 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:27.852927189 +0000 UTC m=+143.394152750 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.374790 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-s9544" event={"ID":"25c939a4-4ebb-4ce6-a99d-3e9108c444cd","Type":"ContainerStarted","Data":"59a492481a09f7f40b96d8bcd05980e3976b696856bdf69d8e2e13a4eda278a9"} Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.388729 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6sl62" event={"ID":"beba071b-8a4b-4aae-862c-793e659eaf30","Type":"ContainerStarted","Data":"6bb430adc510405095c7f3c2f246332380dd0ad3ee2e6b2725745a8ca6d9d792"} Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.413657 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-v5nfq" event={"ID":"6b8433bf-b4a0-4f65-9691-5da935026105","Type":"ContainerStarted","Data":"a1703575190feb9e99cec71825bf0773c7283c98d62543860c3376ababc29004"} Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.422995 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-66jfc" podStartSLOduration=6.422969257 podStartE2EDuration="6.422969257s" podCreationTimestamp="2026-01-26 10:45:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:27.398736886 +0000 UTC m=+142.939962457" watchObservedRunningTime="2026-01-26 10:45:27.422969257 +0000 UTC m=+142.964194818" Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.426671 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-bhzbs"] Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.430545 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-8wskq"] Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.433156 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtlw6" event={"ID":"28dfb8e1-70f1-46b7-887a-c2ae4a892f60","Type":"ContainerStarted","Data":"d15cd1a406e6e91a8433d0aa142f97d54e19a2446ef7d00005ec0034c6364314"} Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.440911 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-v5nfq" podStartSLOduration=123.440893598 podStartE2EDuration="2m3.440893598s" podCreationTimestamp="2026-01-26 10:43:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:27.438117029 +0000 UTC m=+142.979342600" watchObservedRunningTime="2026-01-26 10:45:27.440893598 +0000 UTC m=+142.982119159" Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.444198 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-api/machine-api-operator-5694c8668f-7j782" event={"ID":"f149d971-e11c-471d-91a2-a8e5ed472e41","Type":"ContainerStarted","Data":"e41b9f000b5ab7623534f1e1ebcffaa8c97dc3e19ee3dab6741c337d2c0a1474"} Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.452495 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xsgg6"] Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.453560 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:27 crc kubenswrapper[5003]: E0126 10:45:27.456935 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:27.956911845 +0000 UTC m=+143.498137406 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.464681 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5kwm8" event={"ID":"c152b47d-1462-4bec-9048-37ce680c0d19","Type":"ContainerStarted","Data":"308eecccde8c2cbed1103ced45ae65b4a9dc2afdbfb1f9514460542f8942475e"} Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.467953 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-rctvj"] Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.468169 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-7j782" podStartSLOduration=122.468153346 podStartE2EDuration="2m2.468153346s" podCreationTimestamp="2026-01-26 10:43:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:27.465888091 +0000 UTC m=+143.007113652" watchObservedRunningTime="2026-01-26 10:45:27.468153346 +0000 UTC m=+143.009378907" Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.480597 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-cpxlv" event={"ID":"a8192c61-0b99-47a2-af6c-aee9eff089f1","Type":"ContainerStarted","Data":"84aad0831c03cf894808bc70a0212b70141d033ef8571ccb6467b1f32b00b47c"} Jan 26 10:45:27 crc kubenswrapper[5003]: W0126 10:45:27.488163 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0a8e5c8f_a945_4071_9e4b_eaffcc05b4f7.slice/crio-8f251cb5b1b679a95fa42ea7ca3c4b51bb60f03cd89d6223dd79dd9b08f1a71a WatchSource:0}: Error finding container 8f251cb5b1b679a95fa42ea7ca3c4b51bb60f03cd89d6223dd79dd9b08f1a71a: Status 404 returned error can't find the container with 
id 8f251cb5b1b679a95fa42ea7ca3c4b51bb60f03cd89d6223dd79dd9b08f1a71a Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.493469 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5kwm8" podStartSLOduration=122.493445787 podStartE2EDuration="2m2.493445787s" podCreationTimestamp="2026-01-26 10:43:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:27.48652064 +0000 UTC m=+143.027746231" watchObservedRunningTime="2026-01-26 10:45:27.493445787 +0000 UTC m=+143.034671348" Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.497301 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2rcp" event={"ID":"518875f1-cfde-4528-b15f-369cd105dc65","Type":"ContainerStarted","Data":"4fb04fee2e2ac76dc3fde8daf55c91a65c5b8165bc97b6a9fee0f3d07c23002e"} Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.510978 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" event={"ID":"35326ae8-7aba-468e-abf7-aab37519fc34","Type":"ContainerStarted","Data":"d3ed6bb629279cd1139c547c32a96ba5aa1b7e8c8e4da6b2d14f7c04b68265ab"} Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.512119 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-cpxlv" podStartSLOduration=123.512105489 podStartE2EDuration="2m3.512105489s" podCreationTimestamp="2026-01-26 10:43:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:27.510853654 +0000 UTC m=+143.052079215" watchObservedRunningTime="2026-01-26 10:45:27.512105489 +0000 UTC m=+143.053331050" Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.539785 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2rcp" podStartSLOduration=123.539767118 podStartE2EDuration="2m3.539767118s" podCreationTimestamp="2026-01-26 10:43:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:27.538736319 +0000 UTC m=+143.079961910" watchObservedRunningTime="2026-01-26 10:45:27.539767118 +0000 UTC m=+143.080992679" Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.546627 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-7zdzf" event={"ID":"bf9d71da-10fb-416e-919d-291fd4267dc9","Type":"ContainerStarted","Data":"5b5103f08b12081f1aa3aa09d887a4c15af07852d8bae1a8d68c23071886e6c6"} Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.553971 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-p8x6r" event={"ID":"82664343-0cff-4d22-8287-786cc0ce857c","Type":"ContainerStarted","Data":"093402a97e14301a048fc7d2646f6d7e430278c9592936c865756dcf125d02f4"} Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.557841 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: 
\"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:27 crc kubenswrapper[5003]: E0126 10:45:27.560719 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:28.060705075 +0000 UTC m=+143.601930636 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.571721 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cn5sg" event={"ID":"89d62a5e-a294-4eff-b004-1f62339b8f6e","Type":"ContainerStarted","Data":"4c0233e69db2f5140d711c2a87dc7d6839e2c2010ad500903439683bd1533d63"} Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.583728 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-kqsg8" event={"ID":"cace13a7-d60e-4a21-b606-e6cacde8ad36","Type":"ContainerStarted","Data":"49cd0b686ec257781624e45496e84bcca27b8660e1e23e634e3107b57cd7d2de"} Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.605626 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-grvqx" event={"ID":"3dc4301b-5dc4-4e39-a74b-9e46542e8dfb","Type":"ContainerStarted","Data":"33140f2f81691763fc469bad9393aea5e40c05bc50bc2575daee5f82d7b94022"} Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.615404 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cn5sg" podStartSLOduration=122.615385105 podStartE2EDuration="2m2.615385105s" podCreationTimestamp="2026-01-26 10:43:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:27.614977583 +0000 UTC m=+143.156203144" watchObservedRunningTime="2026-01-26 10:45:27.615385105 +0000 UTC m=+143.156610676" Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.641763 5003 generic.go:334] "Generic (PLEG): container finished" podID="b43bbdc1-9062-4460-85e7-4de472e0fd06" containerID="c7c24696f1bade59b15fe40840aeff6301a667350d00773e890b6a12e619c35f" exitCode=0 Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.642875 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-9d9xc" event={"ID":"b43bbdc1-9062-4460-85e7-4de472e0fd06","Type":"ContainerDied","Data":"c7c24696f1bade59b15fe40840aeff6301a667350d00773e890b6a12e619c35f"} Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.660806 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:27 crc kubenswrapper[5003]: E0126 10:45:27.663667 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:28.163637341 +0000 UTC m=+143.704862912 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.666063 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-djnsq"] Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.676249 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-s7vxm"] Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.695727 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xphsr"] Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.698377 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-tlchc"] Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.705826 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-p8x6r" podStartSLOduration=123.705802444 podStartE2EDuration="2m3.705802444s" podCreationTimestamp="2026-01-26 10:43:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:27.689121018 +0000 UTC m=+143.230346579" watchObservedRunningTime="2026-01-26 10:45:27.705802444 +0000 UTC m=+143.247028005" Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.723892 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-672x8"] Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.724012 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8wf52" event={"ID":"d3839473-1e0c-4987-a025-7be16d2e6006","Type":"ContainerStarted","Data":"8f259a99e760361ff26d841e3056a34514fe5e2227403eee10a892376865e243"} Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.724041 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8wf52" event={"ID":"d3839473-1e0c-4987-a025-7be16d2e6006","Type":"ContainerStarted","Data":"1242a9ce6e0701a844279de0bc5dc48ed0471fef303aa4116d0c8e659c8b5b5a"} Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.726368 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hbddz"] Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.774970 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:27 crc kubenswrapper[5003]: E0126 10:45:27.775339 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:28.275326387 +0000 UTC m=+143.816551948 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.775792 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-sl8wp" event={"ID":"6e70d335-0c77-41ee-a1a5-f0d4b7d28bea","Type":"ContainerStarted","Data":"7ecb527799154610ba6ac5abe008a5794eaeb04db74a1aa3de8fa80d3828e859"} Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.777324 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-sl8wp" Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.786254 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rgpfr"] Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.786335 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-zdzvq"] Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.806796 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-54rgc"] Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.809084 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-28mrx" event={"ID":"0118bd40-a7ee-4622-913b-7395962ac6b8","Type":"ContainerStarted","Data":"b5d05d0925ddcb9ce344455d2d1955f940098075efc2a03132c1631f38dcff9c"} Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.815555 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-sl8wp" Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.822899 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29490405-ljdkv"] Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.865933 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" event={"ID":"16dd94f1-87e0-4fbd-910e-af2ece0fd525","Type":"ContainerStarted","Data":"e4cb76292456155915d5cbbcf483c349f1ee6e995b18a139dff9bdedb9ea31ed"} Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.873638 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-tn4sr"] Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.881563 5003 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.885847 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-grvqx" podStartSLOduration=123.885820518 podStartE2EDuration="2m3.885820518s" podCreationTimestamp="2026-01-26 10:43:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:27.798739065 +0000 UTC m=+143.339964626" watchObservedRunningTime="2026-01-26 10:45:27.885820518 +0000 UTC m=+143.427046079" Jan 26 10:45:27 crc kubenswrapper[5003]: E0126 10:45:27.889813 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:28.389777971 +0000 UTC m=+143.931003542 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.891028 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6w5n8" event={"ID":"96cf2336-9c93-48fe-8d61-c9618714c1b2","Type":"ContainerStarted","Data":"c94829131eb8214b80972705e0a5a7e04ff7809cb8bda6f63b301bdc103a122b"} Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.892038 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-sl8wp" podStartSLOduration=123.892020225 podStartE2EDuration="2m3.892020225s" podCreationTimestamp="2026-01-26 10:43:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:27.817329635 +0000 UTC m=+143.358555216" watchObservedRunningTime="2026-01-26 10:45:27.892020225 +0000 UTC m=+143.433245786" Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.892609 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6w5n8" Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.895204 5003 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-6w5n8 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.895248 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6w5n8" podUID="96cf2336-9c93-48fe-8d61-c9618714c1b2" 
containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.905121 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vdrzc" event={"ID":"ba581404-064d-46b7-a930-ab65f4d61a80","Type":"ContainerStarted","Data":"6cd2c7c2d889c5b04929839d7d914b08f331b1322bfa1352a53abd508ce19b0a"} Jan 26 10:45:27 crc kubenswrapper[5003]: W0126 10:45:27.906826 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4e429f86_b4b3_4de7_8556_ce5973ea48e0.slice/crio-24360934bba12cb9dca5441a93c9d0ecd17b6a4a85e6f07f1a07d9d1a350a74b WatchSource:0}: Error finding container 24360934bba12cb9dca5441a93c9d0ecd17b6a4a85e6f07f1a07d9d1a350a74b: Status 404 returned error can't find the container with id 24360934bba12cb9dca5441a93c9d0ecd17b6a4a85e6f07f1a07d9d1a350a74b Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.929328 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6w5n8" podStartSLOduration=122.929266108 podStartE2EDuration="2m2.929266108s" podCreationTimestamp="2026-01-26 10:43:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:27.922153485 +0000 UTC m=+143.463379046" watchObservedRunningTime="2026-01-26 10:45:27.929266108 +0000 UTC m=+143.470491689" Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.944440 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" event={"ID":"7b2d0b83-b35f-4128-af91-623a6871a431","Type":"ContainerStarted","Data":"6e9ea2e58f3c75874a0197e2a425cd4b1bc85408cb5839c48ad3a61b89602d15"} Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.947935 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.948899 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vdrzc" podStartSLOduration=123.948878887 podStartE2EDuration="2m3.948878887s" podCreationTimestamp="2026-01-26 10:43:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:27.948614799 +0000 UTC m=+143.489840380" watchObservedRunningTime="2026-01-26 10:45:27.948878887 +0000 UTC m=+143.490104448" Jan 26 10:45:27 crc kubenswrapper[5003]: I0126 10:45:27.984942 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:27 crc kubenswrapper[5003]: E0126 10:45:27.988598 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2026-01-26 10:45:28.488582379 +0000 UTC m=+144.029808000 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:28 crc kubenswrapper[5003]: I0126 10:45:28.012798 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sls4l" event={"ID":"64d00de5-3879-4536-84a1-1c305b47c321","Type":"ContainerStarted","Data":"b8c4091f5a52950107f6625d8e54fc859237a1f4d83ba8cdbffc81aea06440e6"} Jan 26 10:45:28 crc kubenswrapper[5003]: I0126 10:45:28.012862 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sls4l" event={"ID":"64d00de5-3879-4536-84a1-1c305b47c321","Type":"ContainerStarted","Data":"cd9db15c7d991ed660d7719f7f5feb19d9b948808908dd35f7ad54467aa28ac3"} Jan 26 10:45:28 crc kubenswrapper[5003]: I0126 10:45:28.028209 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4n9nm" event={"ID":"89d01f2e-05b5-4a11-b1f5-784b2a924c66","Type":"ContainerStarted","Data":"7f5a98674fed85707a4c1355976a05c6f0aeb63faf0e7362857dbaa61e0a3613"} Jan 26 10:45:28 crc kubenswrapper[5003]: I0126 10:45:28.028253 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4n9nm" event={"ID":"89d01f2e-05b5-4a11-b1f5-784b2a924c66","Type":"ContainerStarted","Data":"d844ea35010e9c280c4113f16d520b8d5350fc75eac4e8053a30f00cc25a2e80"} Jan 26 10:45:28 crc kubenswrapper[5003]: I0126 10:45:28.028844 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4n9nm" Jan 26 10:45:28 crc kubenswrapper[5003]: I0126 10:45:28.034528 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" podStartSLOduration=124.034503059 podStartE2EDuration="2m4.034503059s" podCreationTimestamp="2026-01-26 10:43:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:27.986973483 +0000 UTC m=+143.528199054" watchObservedRunningTime="2026-01-26 10:45:28.034503059 +0000 UTC m=+143.575728620" Jan 26 10:45:28 crc kubenswrapper[5003]: I0126 10:45:28.069215 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sls4l" podStartSLOduration=124.069198489 podStartE2EDuration="2m4.069198489s" podCreationTimestamp="2026-01-26 10:43:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:28.037263398 +0000 UTC m=+143.578488959" watchObservedRunningTime="2026-01-26 10:45:28.069198489 +0000 UTC m=+143.610424050" Jan 26 10:45:28 crc kubenswrapper[5003]: I0126 10:45:28.070625 5003 pod_startup_latency_tracker.go:104] "Observed 
pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4n9nm" podStartSLOduration=123.070617429 podStartE2EDuration="2m3.070617429s" podCreationTimestamp="2026-01-26 10:43:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:28.068043376 +0000 UTC m=+143.609268937" watchObservedRunningTime="2026-01-26 10:45:28.070617429 +0000 UTC m=+143.611842990" Jan 26 10:45:28 crc kubenswrapper[5003]: I0126 10:45:28.088985 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:28 crc kubenswrapper[5003]: E0126 10:45:28.089181 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:28.589148528 +0000 UTC m=+144.130374089 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:28 crc kubenswrapper[5003]: I0126 10:45:28.089710 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:28 crc kubenswrapper[5003]: E0126 10:45:28.091190 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:28.591173065 +0000 UTC m=+144.132398716 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:28 crc kubenswrapper[5003]: I0126 10:45:28.150432 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:45:28 crc kubenswrapper[5003]: I0126 10:45:28.190686 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:28 crc kubenswrapper[5003]: E0126 10:45:28.190832 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:28.690812207 +0000 UTC m=+144.232037768 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:28 crc kubenswrapper[5003]: I0126 10:45:28.191016 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:28 crc kubenswrapper[5003]: E0126 10:45:28.191710 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:28.691701743 +0000 UTC m=+144.232927304 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:28 crc kubenswrapper[5003]: I0126 10:45:28.294368 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:28 crc kubenswrapper[5003]: E0126 10:45:28.294619 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:28.794579467 +0000 UTC m=+144.335805038 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:28 crc kubenswrapper[5003]: I0126 10:45:28.294845 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:28 crc kubenswrapper[5003]: E0126 10:45:28.295299 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:28.795274567 +0000 UTC m=+144.336500128 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:28 crc kubenswrapper[5003]: I0126 10:45:28.397096 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:28 crc kubenswrapper[5003]: E0126 10:45:28.397987 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:28.897942165 +0000 UTC m=+144.439167726 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:28 crc kubenswrapper[5003]: I0126 10:45:28.499175 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:28 crc kubenswrapper[5003]: E0126 10:45:28.499780 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:28.999764959 +0000 UTC m=+144.540990530 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:28 crc kubenswrapper[5003]: I0126 10:45:28.552206 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-grvqx" Jan 26 10:45:28 crc kubenswrapper[5003]: I0126 10:45:28.558650 5003 patch_prober.go:28] interesting pod/router-default-5444994796-grvqx container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 10:45:28 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld Jan 26 10:45:28 crc kubenswrapper[5003]: [+]process-running ok Jan 26 10:45:28 crc kubenswrapper[5003]: healthz check failed Jan 26 10:45:28 crc kubenswrapper[5003]: I0126 10:45:28.558711 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-grvqx" podUID="3dc4301b-5dc4-4e39-a74b-9e46542e8dfb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 10:45:28 crc kubenswrapper[5003]: I0126 10:45:28.602974 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:28 crc kubenswrapper[5003]: E0126 10:45:28.603351 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:29.103335183 +0000 UTC m=+144.644560744 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:28 crc kubenswrapper[5003]: I0126 10:45:28.704533 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:28 crc kubenswrapper[5003]: E0126 10:45:28.705063 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:29.205036524 +0000 UTC m=+144.746262145 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:28 crc kubenswrapper[5003]: I0126 10:45:28.807116 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:28 crc kubenswrapper[5003]: E0126 10:45:28.807517 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:29.307502997 +0000 UTC m=+144.848728558 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:28 crc kubenswrapper[5003]: I0126 10:45:28.908742 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:28 crc kubenswrapper[5003]: E0126 10:45:28.909377 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:29.409366252 +0000 UTC m=+144.950591813 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.009914 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:29 crc kubenswrapper[5003]: E0126 10:45:29.010799 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:29.510778775 +0000 UTC m=+145.052004336 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.098085 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-672x8" event={"ID":"cd207bc0-8b94-46b2-acb0-ac9db8d1e0aa","Type":"ContainerStarted","Data":"a1938cb4b519fd6d2037349467573e7d6f4923f72524ffa3c04c4f4e1b89100e"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.098134 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-672x8" event={"ID":"cd207bc0-8b94-46b2-acb0-ac9db8d1e0aa","Type":"ContainerStarted","Data":"6b90539393860a419355059bb27548ab568fa323637cc2a15df2d0a64964b926"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.112218 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:29 crc kubenswrapper[5003]: E0126 10:45:29.112553 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:29.612538047 +0000 UTC m=+145.153763608 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.137209 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-kqsg8" event={"ID":"cace13a7-d60e-4a21-b606-e6cacde8ad36","Type":"ContainerStarted","Data":"7e79e1736f8ef5566bc1483492a1024ee1059641fcd7e5144e9a82d8d1ca6467"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.139904 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtlw6" event={"ID":"28dfb8e1-70f1-46b7-887a-c2ae4a892f60","Type":"ContainerStarted","Data":"ef827a72b254935f5c278c5e9efc95d0b464ba06b2a5949b3a43613075c270fa"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.139937 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtlw6" event={"ID":"28dfb8e1-70f1-46b7-887a-c2ae4a892f60","Type":"ContainerStarted","Data":"9827e0ec791e1b2be7c4edbb3331168d292dcae5d2557fa36acb76882b76633d"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.169110 5003 generic.go:334] "Generic (PLEG): container finished" podID="35326ae8-7aba-468e-abf7-aab37519fc34" containerID="d3ed6bb629279cd1139c547c32a96ba5aa1b7e8c8e4da6b2d14f7c04b68265ab" exitCode=0 Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.169439 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" event={"ID":"35326ae8-7aba-468e-abf7-aab37519fc34","Type":"ContainerDied","Data":"d3ed6bb629279cd1139c547c32a96ba5aa1b7e8c8e4da6b2d14f7c04b68265ab"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.169464 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" event={"ID":"35326ae8-7aba-468e-abf7-aab37519fc34","Type":"ContainerStarted","Data":"b57898fc4b50cca9b6d4de0ccf39207ad8318590adb0ccfb979a43e065becfb2"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.182953 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-8wskq" event={"ID":"0a8e5c8f-a945-4071-9e4b-eaffcc05b4f7","Type":"ContainerStarted","Data":"2d2bb8d70e896d74415eda656598e68404519e9f0d9b2e74b9bf7665af0348a5"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.183012 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-8wskq" event={"ID":"0a8e5c8f-a945-4071-9e4b-eaffcc05b4f7","Type":"ContainerStarted","Data":"8f251cb5b1b679a95fa42ea7ca3c4b51bb60f03cd89d6223dd79dd9b08f1a71a"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.190971 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-7zdzf" event={"ID":"bf9d71da-10fb-416e-919d-291fd4267dc9","Type":"ContainerStarted","Data":"cb3c01c0a0c170da204cee4f18c78f32128a5d2464cf0131c18c0ee26e9d40d3"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.212891 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:29 crc kubenswrapper[5003]: E0126 10:45:29.213194 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:29.713169007 +0000 UTC m=+145.254394568 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.213518 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:29 crc kubenswrapper[5003]: E0126 10:45:29.214273 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:29.714265478 +0000 UTC m=+145.255491039 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.215827 5003 generic.go:334] "Generic (PLEG): container finished" podID="16dd94f1-87e0-4fbd-910e-af2ece0fd525" containerID="e4cb76292456155915d5cbbcf483c349f1ee6e995b18a139dff9bdedb9ea31ed" exitCode=0 Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.215901 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" event={"ID":"16dd94f1-87e0-4fbd-910e-af2ece0fd525","Type":"ContainerDied","Data":"e4cb76292456155915d5cbbcf483c349f1ee6e995b18a139dff9bdedb9ea31ed"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.222487 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zdzvq" event={"ID":"94f616e2-cfa7-4b54-b6f8-4b07df5b714f","Type":"ContainerStarted","Data":"ed9f3f6be5313093f04eb3cd8eeca1d4e9ac74b0f41d1581ecc7cf79b84ef9f1"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.242507 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-54rgc" event={"ID":"d2100df7-b013-4d8a-8ab4-18e2506bdd02","Type":"ContainerStarted","Data":"cf69951c84eb3a4402383a1ad768bfffb2717fb5b32373d54ccf5e7834df00b3"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.275976 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4n9nm" event={"ID":"89d01f2e-05b5-4a11-b1f5-784b2a924c66","Type":"ContainerStarted","Data":"16a395cd3d1f2dbe819909e46ae7bcdf63220650e5c5d9ff054c98614e26d627"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.278873 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-kqsg8" podStartSLOduration=125.27883889 podStartE2EDuration="2m5.27883889s" podCreationTimestamp="2026-01-26 10:43:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:29.210016627 +0000 UTC m=+144.751242188" watchObservedRunningTime="2026-01-26 10:45:29.27883889 +0000 UTC m=+144.820064451" Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.314844 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:29 crc kubenswrapper[5003]: E0126 10:45:29.315014 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:29.814986611 +0000 UTC m=+145.356212172 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.315233 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:29 crc kubenswrapper[5003]: E0126 10:45:29.316454 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:29.816441963 +0000 UTC m=+145.357667524 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.345329 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-7zdzf" podStartSLOduration=124.345313426 podStartE2EDuration="2m4.345313426s" podCreationTimestamp="2026-01-26 10:43:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:29.344673628 +0000 UTC m=+144.885899209" watchObservedRunningTime="2026-01-26 10:45:29.345313426 +0000 UTC m=+144.886538977" Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.346156 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtlw6" podStartSLOduration=125.34615187 podStartE2EDuration="2m5.34615187s" podCreationTimestamp="2026-01-26 10:43:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:29.286131058 +0000 UTC m=+144.827356619" watchObservedRunningTime="2026-01-26 10:45:29.34615187 +0000 UTC m=+144.887377431" Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.398515 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bhzbs" event={"ID":"aab55b24-007d-4dfb-a8a2-624b813920f9","Type":"ContainerStarted","Data":"09c5ef3ae92d66084e81735e77b0d20aed4db69512abf1311424803da2ccbe1f"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.398562 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bhzbs" 
event={"ID":"aab55b24-007d-4dfb-a8a2-624b813920f9","Type":"ContainerStarted","Data":"7541501bf883ac27a6f279066a9e9a0281cd63c0479c8f158aa7ec00bd723e21"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.400586 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-tlchc" event={"ID":"0d96d414-365e-41ec-bbd0-02e8d36271be","Type":"ContainerStarted","Data":"ab07277f102ad138ec0dc3c70e0efac5e2704fb819ffe7266acf83f8c3f7c308"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.403083 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-tn4sr" event={"ID":"ab780a4c-5932-40c1-9383-f3d42238d2ac","Type":"ContainerStarted","Data":"c5f6d42b95a3e435697e09531b5c2829a94e7e32c1f8d38ffe47d8b7a4932b2e"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.403107 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-tn4sr" event={"ID":"ab780a4c-5932-40c1-9383-f3d42238d2ac","Type":"ContainerStarted","Data":"13c5e677fdb1e84a82e32ae8155c31b24c993770ac722507a61b29fdc1b44779"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.403639 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-tn4sr" Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.426412 5003 csr.go:261] certificate signing request csr-7s6vw is approved, waiting to be issued Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.426439 5003 csr.go:257] certificate signing request csr-7s6vw is issued Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.427556 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:29 crc kubenswrapper[5003]: E0126 10:45:29.428660 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:29.928633842 +0000 UTC m=+145.469859433 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.430626 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-rctvj" event={"ID":"1739818d-5558-4901-9b9d-3d735f5f30e3","Type":"ContainerStarted","Data":"4e96d953b59ac102328f16e1455c86c6894e7fba40add7e5331917bf3cfe0b0b"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.459179 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-8wskq" podStartSLOduration=9.459155092 podStartE2EDuration="9.459155092s" podCreationTimestamp="2026-01-26 10:45:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:29.426649135 +0000 UTC m=+144.967874706" watchObservedRunningTime="2026-01-26 10:45:29.459155092 +0000 UTC m=+145.000380653" Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.466341 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6sl62" event={"ID":"beba071b-8a4b-4aae-862c-793e659eaf30","Type":"ContainerStarted","Data":"422a56505d6d53a6fbfc74c37368ae8e32ef7e082c9c10dc9f348cdf040e00a9"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.485181 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rgpfr" event={"ID":"8479f1ce-9bbc-44ba-b73d-7c2cb1929f8b","Type":"ContainerStarted","Data":"59aff8e0d438e714ff608c3fd307293676fd30c50cb9a1281c7134c04f5d8ae3"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.487322 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-28mrx" event={"ID":"0118bd40-a7ee-4622-913b-7395962ac6b8","Type":"ContainerStarted","Data":"c5daf6b54cc05506df735cfa7d37a236fcbfcfab43dde51986111e8ed9441ff5"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.494787 5003 patch_prober.go:28] interesting pod/downloads-7954f5f757-tn4sr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.35:8080/\": dial tcp 10.217.0.35:8080: connect: connection refused" start-of-body= Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.494845 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-tn4sr" podUID="ab780a4c-5932-40c1-9383-f3d42238d2ac" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.35:8080/\": dial tcp 10.217.0.35:8080: connect: connection refused" Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.507963 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-xsgg6" event={"ID":"a3f59cd7-44a7-4d88-a8bb-7108b70efa58","Type":"ContainerStarted","Data":"08740a078e7d6b3529d973c5e435c6979831e7499be250068ec54714b1c82fa5"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.508910 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/marketplace-operator-79b997595-xsgg6" Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.510524 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-s7vxm" event={"ID":"7dcb2031-bbdf-4c68-9d63-694bd2907756","Type":"ContainerStarted","Data":"0d6e0c2020b1f68267f912e42ced6d40bcdb98a299cd235b4ae910f0ef8bc6eb"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.528496 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:29 crc kubenswrapper[5003]: E0126 10:45:29.530385 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:30.030373773 +0000 UTC m=+145.571599334 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.552190 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t55b4" event={"ID":"eb1caca0-0426-492d-b7bc-7a074b5e86ca","Type":"ContainerStarted","Data":"1bedea05260bcd2b502f65497f3c075bed327848e16974abef06c1432cc33b17"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.552251 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t55b4" event={"ID":"eb1caca0-0426-492d-b7bc-7a074b5e86ca","Type":"ContainerStarted","Data":"2b147e174a76aecd1435b314506769df4e4bfff95ddf1aab80403c3d008c9595"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.552933 5003 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-xsgg6 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.31:8080/healthz\": dial tcp 10.217.0.31:8080: connect: connection refused" start-of-body= Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.552965 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-xsgg6" podUID="a3f59cd7-44a7-4d88-a8bb-7108b70efa58" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.31:8080/healthz\": dial tcp 10.217.0.31:8080: connect: connection refused" Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.557859 5003 patch_prober.go:28] interesting pod/router-default-5444994796-grvqx container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 10:45:29 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld Jan 26 10:45:29 crc kubenswrapper[5003]: 
[+]process-running ok Jan 26 10:45:29 crc kubenswrapper[5003]: healthz check failed Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.557909 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-grvqx" podUID="3dc4301b-5dc4-4e39-a74b-9e46542e8dfb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.581747 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-tn4sr" podStartSLOduration=125.581728798 podStartE2EDuration="2m5.581728798s" podCreationTimestamp="2026-01-26 10:43:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:29.49485911 +0000 UTC m=+145.036084671" watchObservedRunningTime="2026-01-26 10:45:29.581728798 +0000 UTC m=+145.122954359" Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.586424 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-s9544" event={"ID":"25c939a4-4ebb-4ce6-a99d-3e9108c444cd","Type":"ContainerStarted","Data":"fa0219f074205f186a2669f539134f9415450cc3a277b53d010f94969f66c8e1"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.587583 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-s9544" Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.603086 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xphsr" event={"ID":"d768c952-394c-4b6e-b4d9-7dbc838cefac","Type":"ContainerStarted","Data":"4804986fec2637930f64986b5fd848485f1fa474c99ae0d4f46232310c3a13cb"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.603963 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xphsr" Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.614992 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6sl62" podStartSLOduration=125.614975696 podStartE2EDuration="2m5.614975696s" podCreationTimestamp="2026-01-26 10:43:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:29.611860648 +0000 UTC m=+145.153086209" watchObservedRunningTime="2026-01-26 10:45:29.614975696 +0000 UTC m=+145.156201257" Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.618455 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-djnsq" event={"ID":"d95fc008-94a6-40af-b7bf-a55d2920775c","Type":"ContainerStarted","Data":"037fe86891cc3890f13ab23470cbafb07250fd8cbe8d0712be87333b0dc1545c"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.618950 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-djnsq" Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.627734 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-tlchc" podStartSLOduration=124.62771358 podStartE2EDuration="2m4.62771358s" podCreationTimestamp="2026-01-26 10:43:25 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:29.583935651 +0000 UTC m=+145.125161212" watchObservedRunningTime="2026-01-26 10:45:29.62771358 +0000 UTC m=+145.168939131" Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.631459 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29490405-ljdkv" event={"ID":"2d39e382-cc5b-4e9e-b66f-87fe37efbd4f","Type":"ContainerStarted","Data":"5e182cc50d8d90ad6d334148196f414539c69f89cbe91c7c9fca03c8122b16e1"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.631509 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29490405-ljdkv" event={"ID":"2d39e382-cc5b-4e9e-b66f-87fe37efbd4f","Type":"ContainerStarted","Data":"3a16dc7ac700f8013a95f39ba181fd414c40515cfefcdba762b9545ec835a175"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.635747 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:29 crc kubenswrapper[5003]: E0126 10:45:29.636956 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:30.136940143 +0000 UTC m=+145.678165704 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.655477 5003 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-xphsr container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.38:5443/healthz\": dial tcp 10.217.0.38:5443: connect: connection refused" start-of-body= Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.655531 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xphsr" podUID="d768c952-394c-4b6e-b4d9-7dbc838cefac" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.38:5443/healthz\": dial tcp 10.217.0.38:5443: connect: connection refused" Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.656473 5003 patch_prober.go:28] interesting pod/console-operator-58897d9998-s9544 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.15:8443/readyz\": dial tcp 10.217.0.15:8443: connect: connection refused" start-of-body= Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.656491 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-s9544" podUID="25c939a4-4ebb-4ce6-a99d-3e9108c444cd" 
containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.15:8443/readyz\": dial tcp 10.217.0.15:8443: connect: connection refused" Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.656560 5003 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-djnsq container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.28:8443/healthz\": dial tcp 10.217.0.28:8443: connect: connection refused" start-of-body= Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.656573 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-djnsq" podUID="d95fc008-94a6-40af-b7bf-a55d2920775c" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.28:8443/healthz\": dial tcp 10.217.0.28:8443: connect: connection refused" Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.664383 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-9d9xc" event={"ID":"b43bbdc1-9062-4460-85e7-4de472e0fd06","Type":"ContainerStarted","Data":"208f00b4a48feddc814d50df60656dc80b98a529ce5db0af973af72a1d580f17"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.664424 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-9d9xc" Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.671465 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t55b4" podStartSLOduration=124.671453037 podStartE2EDuration="2m4.671453037s" podCreationTimestamp="2026-01-26 10:43:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:29.669271555 +0000 UTC m=+145.210497116" watchObservedRunningTime="2026-01-26 10:45:29.671453037 +0000 UTC m=+145.212678598" Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.671715 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-xsgg6" podStartSLOduration=124.671708625 podStartE2EDuration="2m4.671708625s" podCreationTimestamp="2026-01-26 10:43:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:29.642239044 +0000 UTC m=+145.183464605" watchObservedRunningTime="2026-01-26 10:45:29.671708625 +0000 UTC m=+145.212934186" Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.675836 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8wf52" event={"ID":"d3839473-1e0c-4987-a025-7be16d2e6006","Type":"ContainerStarted","Data":"91ea17ae4f0621b016bd9a0a0eaf7310a2169d14129f5e3affcf04e51e6cf804"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.684146 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hbddz" event={"ID":"4e429f86-b4b3-4de7-8556-ce5973ea48e0","Type":"ContainerStarted","Data":"24360934bba12cb9dca5441a93c9d0ecd17b6a4a85e6f07f1a07d9d1a350a74b"} Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.715240 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-config-operator/openshift-config-operator-7777fb866f-9d9xc" podStartSLOduration=125.715224156 podStartE2EDuration="2m5.715224156s" podCreationTimestamp="2026-01-26 10:43:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:29.714022861 +0000 UTC m=+145.255248412" watchObservedRunningTime="2026-01-26 10:45:29.715224156 +0000 UTC m=+145.256449717" Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.716734 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6w5n8" Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.739201 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:29 crc kubenswrapper[5003]: E0126 10:45:29.742731 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:30.24271219 +0000 UTC m=+145.783937751 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.787720 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xphsr" podStartSLOduration=124.787701283 podStartE2EDuration="2m4.787701283s" podCreationTimestamp="2026-01-26 10:43:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:29.757379088 +0000 UTC m=+145.298604649" watchObservedRunningTime="2026-01-26 10:45:29.787701283 +0000 UTC m=+145.328926844" Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.838711 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-s9544" podStartSLOduration=125.838692517 podStartE2EDuration="2m5.838692517s" podCreationTimestamp="2026-01-26 10:43:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:29.798572563 +0000 UTC m=+145.339798124" watchObservedRunningTime="2026-01-26 10:45:29.838692517 +0000 UTC m=+145.379918078" Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.842460 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 
10:45:29 crc kubenswrapper[5003]: E0126 10:45:29.844395 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:30.344378599 +0000 UTC m=+145.885604160 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.866773 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-djnsq" podStartSLOduration=124.866757818 podStartE2EDuration="2m4.866757818s" podCreationTimestamp="2026-01-26 10:43:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:29.837083751 +0000 UTC m=+145.378309302" watchObservedRunningTime="2026-01-26 10:45:29.866757818 +0000 UTC m=+145.407983379" Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.867973 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29490405-ljdkv" podStartSLOduration=29.867966202 podStartE2EDuration="29.867966202s" podCreationTimestamp="2026-01-26 10:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:29.865356068 +0000 UTC m=+145.406581629" watchObservedRunningTime="2026-01-26 10:45:29.867966202 +0000 UTC m=+145.409191763" Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.894968 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8wf52" podStartSLOduration=125.894952472 podStartE2EDuration="2m5.894952472s" podCreationTimestamp="2026-01-26 10:43:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:29.887073617 +0000 UTC m=+145.428299178" watchObservedRunningTime="2026-01-26 10:45:29.894952472 +0000 UTC m=+145.436178033" Jan 26 10:45:29 crc kubenswrapper[5003]: I0126 10:45:29.954769 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:29 crc kubenswrapper[5003]: E0126 10:45:29.955213 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:30.45519975 +0000 UTC m=+145.996425311 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.055925 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:30 crc kubenswrapper[5003]: E0126 10:45:30.056268 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:30.556254483 +0000 UTC m=+146.097480044 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.157138 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:30 crc kubenswrapper[5003]: E0126 10:45:30.157495 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:30.65747995 +0000 UTC m=+146.198705511 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.258424 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:30 crc kubenswrapper[5003]: E0126 10:45:30.258967 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:30.758945994 +0000 UTC m=+146.300171555 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.361124 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:30 crc kubenswrapper[5003]: E0126 10:45:30.361458 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:30.861447137 +0000 UTC m=+146.402672698 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.428209 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-01-26 10:40:29 +0000 UTC, rotation deadline is 2026-10-17 12:37:25.512322225 +0000 UTC Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.428251 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 6337h51m55.084073912s for next certificate rotation Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.462628 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:30 crc kubenswrapper[5003]: E0126 10:45:30.462868 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:30.962821439 +0000 UTC m=+146.504046990 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.462938 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:30 crc kubenswrapper[5003]: E0126 10:45:30.463272 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:30.963255921 +0000 UTC m=+146.504481482 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.554184 5003 patch_prober.go:28] interesting pod/router-default-5444994796-grvqx container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 10:45:30 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld Jan 26 10:45:30 crc kubenswrapper[5003]: [+]process-running ok Jan 26 10:45:30 crc kubenswrapper[5003]: healthz check failed Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.554249 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-grvqx" podUID="3dc4301b-5dc4-4e39-a74b-9e46542e8dfb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.563821 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:30 crc kubenswrapper[5003]: E0126 10:45:30.564046 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:31.064017345 +0000 UTC m=+146.605242916 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.564206 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:30 crc kubenswrapper[5003]: E0126 10:45:30.564642 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:31.064630383 +0000 UTC m=+146.605856004 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.665891 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:30 crc kubenswrapper[5003]: E0126 10:45:30.666101 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:31.166063746 +0000 UTC m=+146.707289307 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.666486 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:30 crc kubenswrapper[5003]: E0126 10:45:30.666826 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:31.166811207 +0000 UTC m=+146.708036768 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.690486 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bhzbs" event={"ID":"aab55b24-007d-4dfb-a8a2-624b813920f9","Type":"ContainerStarted","Data":"829f8472eb0174ddce6bf6f8b4c3aec533b80a9db58234fac0668dee523a9c17"} Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.695030 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-28mrx" event={"ID":"0118bd40-a7ee-4622-913b-7395962ac6b8","Type":"ContainerStarted","Data":"be81dd665d3fc7feda8bc711adf86d58e6a6109efbeadae87b41638b61b4cf63"} Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.695590 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-28mrx" Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.700300 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hbddz" event={"ID":"4e429f86-b4b3-4de7-8556-ce5973ea48e0","Type":"ContainerStarted","Data":"d4ac70335cfe92fcac875517a5d99ff439239f3ac8611e92293e57f928b67e3f"} Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.703635 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" event={"ID":"16dd94f1-87e0-4fbd-910e-af2ece0fd525","Type":"ContainerStarted","Data":"75e8b3f1b0396fefaaadaf9285f8911b0f0c70e23ee9f2e61c8114731f62ba78"} Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.707331 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zdzvq" event={"ID":"94f616e2-cfa7-4b54-b6f8-4b07df5b714f","Type":"ContainerStarted","Data":"589422e08742a9a63eeca27f2ef893011d5ac1350f5d81d698514851830ad861"} Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.707370 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zdzvq" event={"ID":"94f616e2-cfa7-4b54-b6f8-4b07df5b714f","Type":"ContainerStarted","Data":"c802bfa137f605127f35ad15dd7016a046357e11330dfd5ad6e18e7943c0aa26"} Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.710826 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-xsgg6" event={"ID":"a3f59cd7-44a7-4d88-a8bb-7108b70efa58","Type":"ContainerStarted","Data":"4277e048d62cdbbe31da4607945280b845f8cb2304a41ade0efc5a5a3ef134d4"} Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.711687 5003 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-xsgg6 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.31:8080/healthz\": dial tcp 10.217.0.31:8080: connect: connection refused" start-of-body= Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.711725 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-xsgg6" 
podUID="a3f59cd7-44a7-4d88-a8bb-7108b70efa58" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.31:8080/healthz\": dial tcp 10.217.0.31:8080: connect: connection refused" Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.717662 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xphsr" event={"ID":"d768c952-394c-4b6e-b4d9-7dbc838cefac","Type":"ContainerStarted","Data":"23f2ca41825230cdf42210b990a69d96866f6c8e27b2d1db2dee7e9f2f3cedb5"} Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.720154 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-djnsq" event={"ID":"d95fc008-94a6-40af-b7bf-a55d2920775c","Type":"ContainerStarted","Data":"4734e86483fb492a1a9b4c505288c64728513d0784728c303369fdaff26556f7"} Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.724878 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" event={"ID":"35326ae8-7aba-468e-abf7-aab37519fc34","Type":"ContainerStarted","Data":"250ed1482467fe18055aec1403215942ee21608e3732045b78bc7f9402c42cbf"} Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.727438 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bhzbs" podStartSLOduration=125.727420326 podStartE2EDuration="2m5.727420326s" podCreationTimestamp="2026-01-26 10:43:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:30.726852409 +0000 UTC m=+146.268077970" watchObservedRunningTime="2026-01-26 10:45:30.727420326 +0000 UTC m=+146.268645887" Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.728302 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-rctvj" event={"ID":"1739818d-5558-4901-9b9d-3d735f5f30e3","Type":"ContainerStarted","Data":"01182ce05e7183bbd08c59b356783f6923a66b2f2916fceeef19e3d84322ecd8"} Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.728348 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-rctvj" event={"ID":"1739818d-5558-4901-9b9d-3d735f5f30e3","Type":"ContainerStarted","Data":"2bf486ac4f37d7120aa616cc68149ccc18fda159426814b3578c0080f86cbe43"} Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.728532 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hbddz" podStartSLOduration=126.728524017 podStartE2EDuration="2m6.728524017s" podCreationTimestamp="2026-01-26 10:43:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:29.9464312 +0000 UTC m=+145.487656761" watchObservedRunningTime="2026-01-26 10:45:30.728524017 +0000 UTC m=+146.269749578" Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.732507 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-s7vxm" event={"ID":"7dcb2031-bbdf-4c68-9d63-694bd2907756","Type":"ContainerStarted","Data":"98610c7da4735666ee0435aa67f33fa2eaf692b47805e3fe822ba059e97f922e"} Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.735481 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-service-ca/service-ca-9c57cc56f-tlchc" event={"ID":"0d96d414-365e-41ec-bbd0-02e8d36271be","Type":"ContainerStarted","Data":"1032946ac8e275f77c98f2c7b3551ef10cf91cc321578d85438fd69cef255c74"} Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.736329 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-djnsq" Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.740119 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-54rgc" event={"ID":"d2100df7-b013-4d8a-8ab4-18e2506bdd02","Type":"ContainerStarted","Data":"0e0eed290e32afe7ced9cb584fae6ca8d2424228d17b9f11df161bbd21912e90"} Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.754813 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t55b4" event={"ID":"eb1caca0-0426-492d-b7bc-7a074b5e86ca","Type":"ContainerStarted","Data":"0f4fd0635ba74abb875963348bc7115a8b63b7c45314d36a54994ebc0027c14a"} Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.764862 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rgpfr" event={"ID":"8479f1ce-9bbc-44ba-b73d-7c2cb1929f8b","Type":"ContainerStarted","Data":"51f7ddde95e319b5fd94991ca5f5336547f8478bf1d06b856b659172aea89e7e"} Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.767757 5003 patch_prober.go:28] interesting pod/downloads-7954f5f757-tn4sr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.35:8080/\": dial tcp 10.217.0.35:8080: connect: connection refused" start-of-body= Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.767803 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-tn4sr" podUID="ab780a4c-5932-40c1-9383-f3d42238d2ac" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.35:8080/\": dial tcp 10.217.0.35:8080: connect: connection refused" Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.768561 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:30 crc kubenswrapper[5003]: E0126 10:45:30.768819 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:31.268790576 +0000 UTC m=+146.810016147 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.768944 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:30 crc kubenswrapper[5003]: E0126 10:45:30.769838 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:31.269827845 +0000 UTC m=+146.811053396 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.804681 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" podStartSLOduration=125.804662399 podStartE2EDuration="2m5.804662399s" podCreationTimestamp="2026-01-26 10:43:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:30.802339953 +0000 UTC m=+146.343565514" watchObservedRunningTime="2026-01-26 10:45:30.804662399 +0000 UTC m=+146.345887960" Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.846480 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-28mrx" podStartSLOduration=9.846463711 podStartE2EDuration="9.846463711s" podCreationTimestamp="2026-01-26 10:45:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:30.845855844 +0000 UTC m=+146.387081405" watchObservedRunningTime="2026-01-26 10:45:30.846463711 +0000 UTC m=+146.387689272" Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.869803 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:30 crc kubenswrapper[5003]: E0126 10:45:30.871799 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-26 10:45:31.371784593 +0000 UTC m=+146.913010154 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:30 crc kubenswrapper[5003]: I0126 10:45:30.972143 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:30 crc kubenswrapper[5003]: E0126 10:45:30.972498 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:31.472487295 +0000 UTC m=+147.013712856 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.000028 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" podStartSLOduration=127.00001151 podStartE2EDuration="2m7.00001151s" podCreationTimestamp="2026-01-26 10:43:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:30.959186426 +0000 UTC m=+146.500411987" watchObservedRunningTime="2026-01-26 10:45:31.00001151 +0000 UTC m=+146.541237091" Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.001187 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zdzvq" podStartSLOduration=126.001181304 podStartE2EDuration="2m6.001181304s" podCreationTimestamp="2026-01-26 10:43:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:30.999163876 +0000 UTC m=+146.540389447" watchObservedRunningTime="2026-01-26 10:45:31.001181304 +0000 UTC m=+146.542406865" Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.040483 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-s9544" Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.073006 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:31 crc kubenswrapper[5003]: E0126 10:45:31.073400 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:31.573383953 +0000 UTC m=+147.114609514 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.091491 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-672x8" podStartSLOduration=126.091473909 podStartE2EDuration="2m6.091473909s" podCreationTimestamp="2026-01-26 10:43:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:31.059656362 +0000 UTC m=+146.600881923" watchObservedRunningTime="2026-01-26 10:45:31.091473909 +0000 UTC m=+146.632699470" Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.128906 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rgpfr" podStartSLOduration=126.128887156 podStartE2EDuration="2m6.128887156s" podCreationTimestamp="2026-01-26 10:43:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:31.128176936 +0000 UTC m=+146.669402497" watchObservedRunningTime="2026-01-26 10:45:31.128887156 +0000 UTC m=+146.670112717" Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.169940 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-54rgc" podStartSLOduration=127.169921897 podStartE2EDuration="2m7.169921897s" podCreationTimestamp="2026-01-26 10:43:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:31.167452416 +0000 UTC m=+146.708677977" watchObservedRunningTime="2026-01-26 10:45:31.169921897 +0000 UTC m=+146.711147458" Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.175263 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:31 crc kubenswrapper[5003]: E0126 10:45:31.175531 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2026-01-26 10:45:31.675518216 +0000 UTC m=+147.216743777 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.275978 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:31 crc kubenswrapper[5003]: E0126 10:45:31.276342 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:31.776328012 +0000 UTC m=+147.317553573 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.357887 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-rctvj" podStartSLOduration=127.357867937 podStartE2EDuration="2m7.357867937s" podCreationTimestamp="2026-01-26 10:43:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:31.299654357 +0000 UTC m=+146.840879908" watchObservedRunningTime="2026-01-26 10:45:31.357867937 +0000 UTC m=+146.899093498" Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.380155 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:31 crc kubenswrapper[5003]: E0126 10:45:31.380542 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:31.880527124 +0000 UTC m=+147.421752685 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.481052 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:31 crc kubenswrapper[5003]: E0126 10:45:31.481215 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:31.981172014 +0000 UTC m=+147.522397575 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.481274 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:31 crc kubenswrapper[5003]: E0126 10:45:31.481606 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:31.981598126 +0000 UTC m=+147.522823687 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.553325 5003 patch_prober.go:28] interesting pod/router-default-5444994796-grvqx container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 10:45:31 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld Jan 26 10:45:31 crc kubenswrapper[5003]: [+]process-running ok Jan 26 10:45:31 crc kubenswrapper[5003]: healthz check failed Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.553400 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-grvqx" podUID="3dc4301b-5dc4-4e39-a74b-9e46542e8dfb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.582574 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:31 crc kubenswrapper[5003]: E0126 10:45:31.582696 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:32.082679089 +0000 UTC m=+147.623904650 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.582876 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:31 crc kubenswrapper[5003]: E0126 10:45:31.583154 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:32.083145283 +0000 UTC m=+147.624370834 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.684123 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:31 crc kubenswrapper[5003]: E0126 10:45:31.684321 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:32.184275407 +0000 UTC m=+147.725500968 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.684739 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:31 crc kubenswrapper[5003]: E0126 10:45:31.685004 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:32.184990057 +0000 UTC m=+147.726215618 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.718365 5003 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-xphsr container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.38:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.718427 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xphsr" podUID="d768c952-394c-4b6e-b4d9-7dbc838cefac" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.38:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.760248 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-9d9xc" Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.786183 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:31 crc kubenswrapper[5003]: E0126 10:45:31.786942 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:32.286915615 +0000 UTC m=+147.828141176 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.806246 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-s7vxm" event={"ID":"7dcb2031-bbdf-4c68-9d63-694bd2907756","Type":"ContainerStarted","Data":"fb02e477d9cbdc88692c0a9df17d9a75fd97fd6cf4ca4d0d6f9f43f3cbb33ca5"} Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.806304 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-s7vxm" event={"ID":"7dcb2031-bbdf-4c68-9d63-694bd2907756","Type":"ContainerStarted","Data":"fbc2e59b7d306103f85e9fb9ab26dd3820dc081aa7551476bd2b6923a97a69c0"} Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.809709 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rgpfr" Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.810025 5003 patch_prober.go:28] interesting pod/downloads-7954f5f757-tn4sr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.35:8080/\": dial tcp 10.217.0.35:8080: connect: connection refused" start-of-body= Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.810072 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-tn4sr" podUID="ab780a4c-5932-40c1-9383-f3d42238d2ac" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.35:8080/\": dial tcp 10.217.0.35:8080: connect: connection refused" Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.810749 5003 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-xsgg6 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.31:8080/healthz\": dial tcp 10.217.0.31:8080: connect: connection refused" start-of-body= Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.810796 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-xsgg6" podUID="a3f59cd7-44a7-4d88-a8bb-7108b70efa58" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.31:8080/healthz\": dial tcp 10.217.0.31:8080: connect: connection refused" Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.819505 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xphsr" Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.820239 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rgpfr" Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.888391 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: 
\"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:31 crc kubenswrapper[5003]: E0126 10:45:31.890566 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:32.390553221 +0000 UTC m=+147.931778772 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.989969 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:31 crc kubenswrapper[5003]: E0126 10:45:31.990141 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:32.49011648 +0000 UTC m=+148.031342041 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:31 crc kubenswrapper[5003]: I0126 10:45:31.990376 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:31 crc kubenswrapper[5003]: E0126 10:45:31.990739 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:32.490726068 +0000 UTC m=+148.031951619 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.059053 5003 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.091523 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:32 crc kubenswrapper[5003]: E0126 10:45:32.091712 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:32.591685297 +0000 UTC m=+148.132910858 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.091762 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.091846 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.091900 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:45:32 crc kubenswrapper[5003]: E0126 10:45:32.092094 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2026-01-26 10:45:32.592078418 +0000 UTC m=+148.133304059 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.093106 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.101050 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.194011 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:32 crc kubenswrapper[5003]: E0126 10:45:32.194157 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:32.694136449 +0000 UTC m=+148.235362010 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.194271 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.194335 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.194396 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:32 crc kubenswrapper[5003]: E0126 10:45:32.194755 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 10:45:32.694741887 +0000 UTC m=+148.235967448 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2kvjq" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.210009 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.210357 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.215366 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.225703 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.233613 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.279022 5003 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-01-26T10:45:32.059080437Z","Handler":null,"Name":""} Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.303852 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:32 crc kubenswrapper[5003]: E0126 10:45:32.304232 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 10:45:32.804206959 +0000 UTC m=+148.345432520 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.395207 5003 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.395243 5003 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.409499 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.416293 5003 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.416342 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.474686 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.475502 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.482067 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.486777 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.490379 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.498709 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2kvjq\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.510503 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.524448 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.559102 5003 patch_prober.go:28] interesting pod/router-default-5444994796-grvqx container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 10:45:32 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld Jan 26 10:45:32 crc kubenswrapper[5003]: [+]process-running ok Jan 26 10:45:32 crc kubenswrapper[5003]: healthz check failed Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.559160 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-grvqx" podUID="3dc4301b-5dc4-4e39-a74b-9e46542e8dfb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.611910 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.611967 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.661651 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-cpxlv" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.662331 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-cpxlv" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.669433 5003 patch_prober.go:28] interesting pod/console-f9d7485db-cpxlv container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.18:8443/health\": dial tcp 10.217.0.18:8443: connect: connection refused" start-of-body= Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.669497 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-cpxlv" podUID="a8192c61-0b99-47a2-af6c-aee9eff089f1" containerName="console" probeResult="failure" output="Get \"https://10.217.0.18:8443/health\": dial tcp 10.217.0.18:8443: connect: connection refused" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.670495 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4nzsj"] Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.674872 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4nzsj" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.685076 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.692882 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4nzsj"] Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.718098 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.724852 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.725452 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.740440 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.743334 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.743367 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.775009 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.792972 5003 patch_prober.go:28] interesting pod/apiserver-76f77b778f-jjlwz container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Jan 26 10:45:32 crc kubenswrapper[5003]: [+]log ok Jan 26 10:45:32 crc kubenswrapper[5003]: [+]etcd ok Jan 26 10:45:32 crc kubenswrapper[5003]: [+]poststarthook/start-apiserver-admission-initializer ok Jan 26 10:45:32 crc kubenswrapper[5003]: [+]poststarthook/generic-apiserver-start-informers ok Jan 26 10:45:32 crc kubenswrapper[5003]: [+]poststarthook/max-in-flight-filter ok Jan 26 10:45:32 crc kubenswrapper[5003]: [+]poststarthook/storage-object-count-tracker-hook ok Jan 26 10:45:32 crc kubenswrapper[5003]: [+]poststarthook/image.openshift.io-apiserver-caches ok Jan 26 10:45:32 crc kubenswrapper[5003]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld 
Jan 26 10:45:32 crc kubenswrapper[5003]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Jan 26 10:45:32 crc kubenswrapper[5003]: [+]poststarthook/project.openshift.io-projectcache ok Jan 26 10:45:32 crc kubenswrapper[5003]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Jan 26 10:45:32 crc kubenswrapper[5003]: [+]poststarthook/openshift.io-startinformers ok Jan 26 10:45:32 crc kubenswrapper[5003]: [+]poststarthook/openshift.io-restmapperupdater ok Jan 26 10:45:32 crc kubenswrapper[5003]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Jan 26 10:45:32 crc kubenswrapper[5003]: livez check failed Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.793052 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" podUID="35326ae8-7aba-468e-abf7-aab37519fc34" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.825883 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.825924 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.826597 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/056db06f-766d-4393-87b8-4148b3f4c3c9-utilities\") pod \"community-operators-4nzsj\" (UID: \"056db06f-766d-4393-87b8-4148b3f4c3c9\") " pod="openshift-marketplace/community-operators-4nzsj" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.826641 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/056db06f-766d-4393-87b8-4148b3f4c3c9-catalog-content\") pod \"community-operators-4nzsj\" (UID: \"056db06f-766d-4393-87b8-4148b3f4c3c9\") " pod="openshift-marketplace/community-operators-4nzsj" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.827582 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ckvh\" (UniqueName: \"kubernetes.io/projected/056db06f-766d-4393-87b8-4148b3f4c3c9-kube-api-access-9ckvh\") pod \"community-operators-4nzsj\" (UID: \"056db06f-766d-4393-87b8-4148b3f4c3c9\") " pod="openshift-marketplace/community-operators-4nzsj" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.842392 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-s7vxm" event={"ID":"7dcb2031-bbdf-4c68-9d63-694bd2907756","Type":"ContainerStarted","Data":"ab9714a6a9c2c57fca8b38390914b31a65ce72d4094401a2ef6e6191c40b6739"} Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.848644 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.865620 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.876978 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nlxw6"] Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.880318 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nlxw6" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.891474 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.892192 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-s7vxm" podStartSLOduration=12.892173439 podStartE2EDuration="12.892173439s" podCreationTimestamp="2026-01-26 10:45:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:32.890708897 +0000 UTC m=+148.431934458" watchObservedRunningTime="2026-01-26 10:45:32.892173439 +0000 UTC m=+148.433399000" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.897178 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nlxw6"] Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.931003 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/056db06f-766d-4393-87b8-4148b3f4c3c9-utilities\") pod \"community-operators-4nzsj\" (UID: \"056db06f-766d-4393-87b8-4148b3f4c3c9\") " pod="openshift-marketplace/community-operators-4nzsj" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.931054 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/056db06f-766d-4393-87b8-4148b3f4c3c9-catalog-content\") pod \"community-operators-4nzsj\" (UID: \"056db06f-766d-4393-87b8-4148b3f4c3c9\") " pod="openshift-marketplace/community-operators-4nzsj" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.931152 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ckvh\" (UniqueName: \"kubernetes.io/projected/056db06f-766d-4393-87b8-4148b3f4c3c9-kube-api-access-9ckvh\") pod \"community-operators-4nzsj\" (UID: \"056db06f-766d-4393-87b8-4148b3f4c3c9\") " pod="openshift-marketplace/community-operators-4nzsj" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.932464 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/056db06f-766d-4393-87b8-4148b3f4c3c9-catalog-content\") pod \"community-operators-4nzsj\" (UID: \"056db06f-766d-4393-87b8-4148b3f4c3c9\") " pod="openshift-marketplace/community-operators-4nzsj" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.932925 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/056db06f-766d-4393-87b8-4148b3f4c3c9-utilities\") pod \"community-operators-4nzsj\" (UID: \"056db06f-766d-4393-87b8-4148b3f4c3c9\") " pod="openshift-marketplace/community-operators-4nzsj" Jan 26 10:45:32 crc kubenswrapper[5003]: I0126 10:45:32.973074 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ckvh\" (UniqueName: 
\"kubernetes.io/projected/056db06f-766d-4393-87b8-4148b3f4c3c9-kube-api-access-9ckvh\") pod \"community-operators-4nzsj\" (UID: \"056db06f-766d-4393-87b8-4148b3f4c3c9\") " pod="openshift-marketplace/community-operators-4nzsj" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:32.999423 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.000246 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.006503 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.006956 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.007264 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4nzsj" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.032030 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6f195f7-8805-422e-b316-c57c71a27a38-catalog-content\") pod \"certified-operators-nlxw6\" (UID: \"a6f195f7-8805-422e-b316-c57c71a27a38\") " pod="openshift-marketplace/certified-operators-nlxw6" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.032244 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6f195f7-8805-422e-b316-c57c71a27a38-utilities\") pod \"certified-operators-nlxw6\" (UID: \"a6f195f7-8805-422e-b316-c57c71a27a38\") " pod="openshift-marketplace/certified-operators-nlxw6" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.032401 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqnqd\" (UniqueName: \"kubernetes.io/projected/a6f195f7-8805-422e-b316-c57c71a27a38-kube-api-access-fqnqd\") pod \"certified-operators-nlxw6\" (UID: \"a6f195f7-8805-422e-b316-c57c71a27a38\") " pod="openshift-marketplace/certified-operators-nlxw6" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.039622 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.040072 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.086355 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xfvlh"] Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.087536 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xfvlh" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.092093 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xfvlh"] Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.147402 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqnqd\" (UniqueName: \"kubernetes.io/projected/a6f195f7-8805-422e-b316-c57c71a27a38-kube-api-access-fqnqd\") pod \"certified-operators-nlxw6\" (UID: \"a6f195f7-8805-422e-b316-c57c71a27a38\") " pod="openshift-marketplace/certified-operators-nlxw6" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.147771 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8e5c341c-99a9-4d1d-a354-aa23722a8d13-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"8e5c341c-99a9-4d1d-a354-aa23722a8d13\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.147815 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6f195f7-8805-422e-b316-c57c71a27a38-catalog-content\") pod \"certified-operators-nlxw6\" (UID: \"a6f195f7-8805-422e-b316-c57c71a27a38\") " pod="openshift-marketplace/certified-operators-nlxw6" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.147874 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6f195f7-8805-422e-b316-c57c71a27a38-utilities\") pod \"certified-operators-nlxw6\" (UID: \"a6f195f7-8805-422e-b316-c57c71a27a38\") " pod="openshift-marketplace/certified-operators-nlxw6" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.147904 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8e5c341c-99a9-4d1d-a354-aa23722a8d13-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"8e5c341c-99a9-4d1d-a354-aa23722a8d13\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.148410 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6f195f7-8805-422e-b316-c57c71a27a38-catalog-content\") pod \"certified-operators-nlxw6\" (UID: \"a6f195f7-8805-422e-b316-c57c71a27a38\") " pod="openshift-marketplace/certified-operators-nlxw6" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.149533 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6f195f7-8805-422e-b316-c57c71a27a38-utilities\") pod \"certified-operators-nlxw6\" (UID: \"a6f195f7-8805-422e-b316-c57c71a27a38\") " pod="openshift-marketplace/certified-operators-nlxw6" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.183667 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqnqd\" (UniqueName: \"kubernetes.io/projected/a6f195f7-8805-422e-b316-c57c71a27a38-kube-api-access-fqnqd\") pod \"certified-operators-nlxw6\" (UID: \"a6f195f7-8805-422e-b316-c57c71a27a38\") " pod="openshift-marketplace/certified-operators-nlxw6" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.248615 5003 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8e5c341c-99a9-4d1d-a354-aa23722a8d13-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"8e5c341c-99a9-4d1d-a354-aa23722a8d13\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.248683 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5shrs\" (UniqueName: \"kubernetes.io/projected/c837509e-b233-4ed4-9c00-49f01de19953-kube-api-access-5shrs\") pod \"community-operators-xfvlh\" (UID: \"c837509e-b233-4ed4-9c00-49f01de19953\") " pod="openshift-marketplace/community-operators-xfvlh" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.248828 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c837509e-b233-4ed4-9c00-49f01de19953-catalog-content\") pod \"community-operators-xfvlh\" (UID: \"c837509e-b233-4ed4-9c00-49f01de19953\") " pod="openshift-marketplace/community-operators-xfvlh" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.248866 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8e5c341c-99a9-4d1d-a354-aa23722a8d13-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"8e5c341c-99a9-4d1d-a354-aa23722a8d13\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.248931 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c837509e-b233-4ed4-9c00-49f01de19953-utilities\") pod \"community-operators-xfvlh\" (UID: \"c837509e-b233-4ed4-9c00-49f01de19953\") " pod="openshift-marketplace/community-operators-xfvlh" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.248959 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8e5c341c-99a9-4d1d-a354-aa23722a8d13-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"8e5c341c-99a9-4d1d-a354-aa23722a8d13\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.251891 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nlxw6" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.254977 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-7sz59"] Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.255965 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7sz59" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.266206 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.284730 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7sz59"] Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.286140 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8e5c341c-99a9-4d1d-a354-aa23722a8d13-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"8e5c341c-99a9-4d1d-a354-aa23722a8d13\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.330239 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.351520 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c837509e-b233-4ed4-9c00-49f01de19953-utilities\") pod \"community-operators-xfvlh\" (UID: \"c837509e-b233-4ed4-9c00-49f01de19953\") " pod="openshift-marketplace/community-operators-xfvlh" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.351561 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/259b94cb-d033-4789-a0b6-dbfc3a361d0a-utilities\") pod \"certified-operators-7sz59\" (UID: \"259b94cb-d033-4789-a0b6-dbfc3a361d0a\") " pod="openshift-marketplace/certified-operators-7sz59" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.351613 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7n25\" (UniqueName: \"kubernetes.io/projected/259b94cb-d033-4789-a0b6-dbfc3a361d0a-kube-api-access-n7n25\") pod \"certified-operators-7sz59\" (UID: \"259b94cb-d033-4789-a0b6-dbfc3a361d0a\") " pod="openshift-marketplace/certified-operators-7sz59" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.351668 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5shrs\" (UniqueName: \"kubernetes.io/projected/c837509e-b233-4ed4-9c00-49f01de19953-kube-api-access-5shrs\") pod \"community-operators-xfvlh\" (UID: \"c837509e-b233-4ed4-9c00-49f01de19953\") " pod="openshift-marketplace/community-operators-xfvlh" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.351694 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/259b94cb-d033-4789-a0b6-dbfc3a361d0a-catalog-content\") pod \"certified-operators-7sz59\" (UID: \"259b94cb-d033-4789-a0b6-dbfc3a361d0a\") " pod="openshift-marketplace/certified-operators-7sz59" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.351735 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c837509e-b233-4ed4-9c00-49f01de19953-catalog-content\") pod \"community-operators-xfvlh\" (UID: \"c837509e-b233-4ed4-9c00-49f01de19953\") " pod="openshift-marketplace/community-operators-xfvlh" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.352130 5003 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c837509e-b233-4ed4-9c00-49f01de19953-catalog-content\") pod \"community-operators-xfvlh\" (UID: \"c837509e-b233-4ed4-9c00-49f01de19953\") " pod="openshift-marketplace/community-operators-xfvlh" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.352816 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c837509e-b233-4ed4-9c00-49f01de19953-utilities\") pod \"community-operators-xfvlh\" (UID: \"c837509e-b233-4ed4-9c00-49f01de19953\") " pod="openshift-marketplace/community-operators-xfvlh" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.391778 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5shrs\" (UniqueName: \"kubernetes.io/projected/c837509e-b233-4ed4-9c00-49f01de19953-kube-api-access-5shrs\") pod \"community-operators-xfvlh\" (UID: \"c837509e-b233-4ed4-9c00-49f01de19953\") " pod="openshift-marketplace/community-operators-xfvlh" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.414787 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2kvjq"] Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.424622 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xfvlh" Jan 26 10:45:33 crc kubenswrapper[5003]: W0126 10:45:33.448936 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda56181a5_de19_48f8_8a39_73d3ea6c9d1e.slice/crio-15962a74288ae2ba4fda966b5fb83af41ec5b592de07af7a667d08caf5abcaba WatchSource:0}: Error finding container 15962a74288ae2ba4fda966b5fb83af41ec5b592de07af7a667d08caf5abcaba: Status 404 returned error can't find the container with id 15962a74288ae2ba4fda966b5fb83af41ec5b592de07af7a667d08caf5abcaba Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.452388 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/259b94cb-d033-4789-a0b6-dbfc3a361d0a-catalog-content\") pod \"certified-operators-7sz59\" (UID: \"259b94cb-d033-4789-a0b6-dbfc3a361d0a\") " pod="openshift-marketplace/certified-operators-7sz59" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.452478 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/259b94cb-d033-4789-a0b6-dbfc3a361d0a-utilities\") pod \"certified-operators-7sz59\" (UID: \"259b94cb-d033-4789-a0b6-dbfc3a361d0a\") " pod="openshift-marketplace/certified-operators-7sz59" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.452509 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7n25\" (UniqueName: \"kubernetes.io/projected/259b94cb-d033-4789-a0b6-dbfc3a361d0a-kube-api-access-n7n25\") pod \"certified-operators-7sz59\" (UID: \"259b94cb-d033-4789-a0b6-dbfc3a361d0a\") " pod="openshift-marketplace/certified-operators-7sz59" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.453534 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/259b94cb-d033-4789-a0b6-dbfc3a361d0a-catalog-content\") pod \"certified-operators-7sz59\" (UID: \"259b94cb-d033-4789-a0b6-dbfc3a361d0a\") " 
pod="openshift-marketplace/certified-operators-7sz59" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.453798 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/259b94cb-d033-4789-a0b6-dbfc3a361d0a-utilities\") pod \"certified-operators-7sz59\" (UID: \"259b94cb-d033-4789-a0b6-dbfc3a361d0a\") " pod="openshift-marketplace/certified-operators-7sz59" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.504428 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7n25\" (UniqueName: \"kubernetes.io/projected/259b94cb-d033-4789-a0b6-dbfc3a361d0a-kube-api-access-n7n25\") pod \"certified-operators-7sz59\" (UID: \"259b94cb-d033-4789-a0b6-dbfc3a361d0a\") " pod="openshift-marketplace/certified-operators-7sz59" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.550829 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4nzsj"] Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.555864 5003 patch_prober.go:28] interesting pod/router-default-5444994796-grvqx container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 10:45:33 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld Jan 26 10:45:33 crc kubenswrapper[5003]: [+]process-running ok Jan 26 10:45:33 crc kubenswrapper[5003]: healthz check failed Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.555912 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-grvqx" podUID="3dc4301b-5dc4-4e39-a74b-9e46542e8dfb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.586057 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7sz59" Jan 26 10:45:33 crc kubenswrapper[5003]: W0126 10:45:33.586561 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod056db06f_766d_4393_87b8_4148b3f4c3c9.slice/crio-7414fcc63b56498f28c3649fbf658944eea8991363a5e49e911474d6c8391fc8 WatchSource:0}: Error finding container 7414fcc63b56498f28c3649fbf658944eea8991363a5e49e911474d6c8391fc8: Status 404 returned error can't find the container with id 7414fcc63b56498f28c3649fbf658944eea8991363a5e49e911474d6c8391fc8 Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.646111 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nlxw6"] Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.781029 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xfvlh"] Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.876756 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xfvlh" event={"ID":"c837509e-b233-4ed4-9c00-49f01de19953","Type":"ContainerStarted","Data":"d35d562e4caaebbdfad10703173ebbbe414579c525bf73e5d975f121156e8bcc"} Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.882966 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"ff49301beef792c88509526067571c9ee833e0e1f2c87a00e92e5bef6a71cfd4"} Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.883013 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"c72ea2d00313399b466e73e2706124598230a6e126ee01968c63b7f11caf1734"} Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.883917 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.906194 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nlxw6" event={"ID":"a6f195f7-8805-422e-b316-c57c71a27a38","Type":"ContainerStarted","Data":"314896be1f8dedc9a636fa2c2264004319ce86437579786634353b0fb6d159d0"} Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.906307 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nlxw6" event={"ID":"a6f195f7-8805-422e-b316-c57c71a27a38","Type":"ContainerStarted","Data":"3976cb8b3ece0126119bba72cc9201d03e1e31330b7b015502fa12a93eeb560a"} Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.910494 5003 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.918517 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"619f360271569baa71a0f9392a5dce61536396d792ba832b88537e206ddafd85"} Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.918565 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" 
event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"5e3fcafdfbe806c0d951b494344dd786cc37e97e7d194ea60ebcc7e8d18106d4"} Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.926343 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.935546 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"1b7e61bcdedba5882ce1e1fee1bdefbce6b0bec6161f5e1d8be9e239bac8c73c"} Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.935620 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"173fc5d6a9739613bf83caab7c48be3b3c479666ad5c193d5200e70f5e1eaaac"} Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.948481 5003 generic.go:334] "Generic (PLEG): container finished" podID="2d39e382-cc5b-4e9e-b66f-87fe37efbd4f" containerID="5e182cc50d8d90ad6d334148196f414539c69f89cbe91c7c9fca03c8122b16e1" exitCode=0 Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.948617 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29490405-ljdkv" event={"ID":"2d39e382-cc5b-4e9e-b66f-87fe37efbd4f","Type":"ContainerDied","Data":"5e182cc50d8d90ad6d334148196f414539c69f89cbe91c7c9fca03c8122b16e1"} Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.959631 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4","Type":"ContainerStarted","Data":"07ad8176d2f71547928a750c4bc3ffa70853bfb3e1f4df8df60606f2d214adef"} Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.959675 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4","Type":"ContainerStarted","Data":"909bf018cc4820b3d73b780dfa763d51140d82fca35b4965b2270b46f78c0fc5"} Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.989006 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7sz59"] Jan 26 10:45:33 crc kubenswrapper[5003]: I0126 10:45:33.993976 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4nzsj" event={"ID":"056db06f-766d-4393-87b8-4148b3f4c3c9","Type":"ContainerStarted","Data":"7414fcc63b56498f28c3649fbf658944eea8991363a5e49e911474d6c8391fc8"} Jan 26 10:45:34 crc kubenswrapper[5003]: I0126 10:45:34.008452 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" event={"ID":"a56181a5-de19-48f8-8a39-73d3ea6c9d1e","Type":"ContainerStarted","Data":"f7cc26ecc643673f6e733f2a7e36462d73f69e19590e44f6a6d41bb0e4ecbb41"} Jan 26 10:45:34 crc kubenswrapper[5003]: I0126 10:45:34.008503 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" event={"ID":"a56181a5-de19-48f8-8a39-73d3ea6c9d1e","Type":"ContainerStarted","Data":"15962a74288ae2ba4fda966b5fb83af41ec5b592de07af7a667d08caf5abcaba"} Jan 26 10:45:34 crc kubenswrapper[5003]: I0126 10:45:34.024466 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2rn99" Jan 26 10:45:34 crc kubenswrapper[5003]: I0126 10:45:34.049397 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=2.049379754 podStartE2EDuration="2.049379754s" podCreationTimestamp="2026-01-26 10:45:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:34.049161097 +0000 UTC m=+149.590386678" watchObservedRunningTime="2026-01-26 10:45:34.049379754 +0000 UTC m=+149.590605315" Jan 26 10:45:34 crc kubenswrapper[5003]: I0126 10:45:34.118041 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" podStartSLOduration=130.118022081 podStartE2EDuration="2m10.118022081s" podCreationTimestamp="2026-01-26 10:43:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:45:34.117859017 +0000 UTC m=+149.659084578" watchObservedRunningTime="2026-01-26 10:45:34.118022081 +0000 UTC m=+149.659247642" Jan 26 10:45:34 crc kubenswrapper[5003]: I0126 10:45:34.556771 5003 patch_prober.go:28] interesting pod/router-default-5444994796-grvqx container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 10:45:34 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld Jan 26 10:45:34 crc kubenswrapper[5003]: [+]process-running ok Jan 26 10:45:34 crc kubenswrapper[5003]: healthz check failed Jan 26 10:45:34 crc kubenswrapper[5003]: I0126 10:45:34.556830 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-grvqx" podUID="3dc4301b-5dc4-4e39-a74b-9e46542e8dfb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 10:45:34 crc kubenswrapper[5003]: I0126 10:45:34.654560 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qbjvl"] Jan 26 10:45:34 crc kubenswrapper[5003]: I0126 10:45:34.655533 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qbjvl" Jan 26 10:45:34 crc kubenswrapper[5003]: I0126 10:45:34.659009 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 26 10:45:34 crc kubenswrapper[5003]: I0126 10:45:34.676056 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qbjvl"] Jan 26 10:45:34 crc kubenswrapper[5003]: I0126 10:45:34.783253 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ksvrq\" (UniqueName: \"kubernetes.io/projected/afdef7f7-32b5-4976-881a-398dc09ac9bd-kube-api-access-ksvrq\") pod \"redhat-marketplace-qbjvl\" (UID: \"afdef7f7-32b5-4976-881a-398dc09ac9bd\") " pod="openshift-marketplace/redhat-marketplace-qbjvl" Jan 26 10:45:34 crc kubenswrapper[5003]: I0126 10:45:34.783557 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/afdef7f7-32b5-4976-881a-398dc09ac9bd-utilities\") pod \"redhat-marketplace-qbjvl\" (UID: \"afdef7f7-32b5-4976-881a-398dc09ac9bd\") " pod="openshift-marketplace/redhat-marketplace-qbjvl" Jan 26 10:45:34 crc kubenswrapper[5003]: I0126 10:45:34.783597 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/afdef7f7-32b5-4976-881a-398dc09ac9bd-catalog-content\") pod \"redhat-marketplace-qbjvl\" (UID: \"afdef7f7-32b5-4976-881a-398dc09ac9bd\") " pod="openshift-marketplace/redhat-marketplace-qbjvl" Jan 26 10:45:34 crc kubenswrapper[5003]: I0126 10:45:34.885032 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/afdef7f7-32b5-4976-881a-398dc09ac9bd-utilities\") pod \"redhat-marketplace-qbjvl\" (UID: \"afdef7f7-32b5-4976-881a-398dc09ac9bd\") " pod="openshift-marketplace/redhat-marketplace-qbjvl" Jan 26 10:45:34 crc kubenswrapper[5003]: I0126 10:45:34.885086 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/afdef7f7-32b5-4976-881a-398dc09ac9bd-catalog-content\") pod \"redhat-marketplace-qbjvl\" (UID: \"afdef7f7-32b5-4976-881a-398dc09ac9bd\") " pod="openshift-marketplace/redhat-marketplace-qbjvl" Jan 26 10:45:34 crc kubenswrapper[5003]: I0126 10:45:34.885147 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ksvrq\" (UniqueName: \"kubernetes.io/projected/afdef7f7-32b5-4976-881a-398dc09ac9bd-kube-api-access-ksvrq\") pod \"redhat-marketplace-qbjvl\" (UID: \"afdef7f7-32b5-4976-881a-398dc09ac9bd\") " pod="openshift-marketplace/redhat-marketplace-qbjvl" Jan 26 10:45:34 crc kubenswrapper[5003]: I0126 10:45:34.885738 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/afdef7f7-32b5-4976-881a-398dc09ac9bd-catalog-content\") pod \"redhat-marketplace-qbjvl\" (UID: \"afdef7f7-32b5-4976-881a-398dc09ac9bd\") " pod="openshift-marketplace/redhat-marketplace-qbjvl" Jan 26 10:45:34 crc kubenswrapper[5003]: I0126 10:45:34.885831 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/afdef7f7-32b5-4976-881a-398dc09ac9bd-utilities\") pod \"redhat-marketplace-qbjvl\" (UID: 
\"afdef7f7-32b5-4976-881a-398dc09ac9bd\") " pod="openshift-marketplace/redhat-marketplace-qbjvl" Jan 26 10:45:34 crc kubenswrapper[5003]: I0126 10:45:34.901764 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ksvrq\" (UniqueName: \"kubernetes.io/projected/afdef7f7-32b5-4976-881a-398dc09ac9bd-kube-api-access-ksvrq\") pod \"redhat-marketplace-qbjvl\" (UID: \"afdef7f7-32b5-4976-881a-398dc09ac9bd\") " pod="openshift-marketplace/redhat-marketplace-qbjvl" Jan 26 10:45:34 crc kubenswrapper[5003]: I0126 10:45:34.968133 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qbjvl" Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.021096 5003 generic.go:334] "Generic (PLEG): container finished" podID="4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4" containerID="07ad8176d2f71547928a750c4bc3ffa70853bfb3e1f4df8df60606f2d214adef" exitCode=0 Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.021168 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4","Type":"ContainerDied","Data":"07ad8176d2f71547928a750c4bc3ffa70853bfb3e1f4df8df60606f2d214adef"} Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.023418 5003 generic.go:334] "Generic (PLEG): container finished" podID="a6f195f7-8805-422e-b316-c57c71a27a38" containerID="314896be1f8dedc9a636fa2c2264004319ce86437579786634353b0fb6d159d0" exitCode=0 Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.023491 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nlxw6" event={"ID":"a6f195f7-8805-422e-b316-c57c71a27a38","Type":"ContainerDied","Data":"314896be1f8dedc9a636fa2c2264004319ce86437579786634353b0fb6d159d0"} Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.051944 5003 generic.go:334] "Generic (PLEG): container finished" podID="259b94cb-d033-4789-a0b6-dbfc3a361d0a" containerID="57718a922d75adf8bfcb98dddf5e70d80cfb731cd0fd714423a17bf87c8a35d5" exitCode=0 Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.052058 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7sz59" event={"ID":"259b94cb-d033-4789-a0b6-dbfc3a361d0a","Type":"ContainerDied","Data":"57718a922d75adf8bfcb98dddf5e70d80cfb731cd0fd714423a17bf87c8a35d5"} Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.052087 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7sz59" event={"ID":"259b94cb-d033-4789-a0b6-dbfc3a361d0a","Type":"ContainerStarted","Data":"0d2f2e39148f96d768cdb6dce9c726bad76a6fad1b38d1e25c65f2a706fdca5f"} Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.059638 5003 generic.go:334] "Generic (PLEG): container finished" podID="056db06f-766d-4393-87b8-4148b3f4c3c9" containerID="5355513c6d1dde2479d5b40b8d1cc96445f9241657ce3d356cb7d2420c7b0975" exitCode=0 Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.059701 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4nzsj" event={"ID":"056db06f-766d-4393-87b8-4148b3f4c3c9","Type":"ContainerDied","Data":"5355513c6d1dde2479d5b40b8d1cc96445f9241657ce3d356cb7d2420c7b0975"} Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.063255 5003 generic.go:334] "Generic (PLEG): container finished" podID="c837509e-b233-4ed4-9c00-49f01de19953" 
containerID="780a6ca6b697263f9c5581f157b1f638432aed615cfd3decfa40e376351fe8d4" exitCode=0 Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.063321 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xfvlh" event={"ID":"c837509e-b233-4ed4-9c00-49f01de19953","Type":"ContainerDied","Data":"780a6ca6b697263f9c5581f157b1f638432aed615cfd3decfa40e376351fe8d4"} Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.063674 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nd6jd"] Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.065008 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nd6jd" Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.066363 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"8e5c341c-99a9-4d1d-a354-aa23722a8d13","Type":"ContainerStarted","Data":"18f84b74cc016e376793000a5a541e666e0b7e6d7441ee2547ac1f6bf11acc5a"} Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.066901 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.090998 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nd6jd"] Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.192215 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-smjbj\" (UniqueName: \"kubernetes.io/projected/0fc9d3a8-b3ed-4321-a724-f0df98e10736-kube-api-access-smjbj\") pod \"redhat-marketplace-nd6jd\" (UID: \"0fc9d3a8-b3ed-4321-a724-f0df98e10736\") " pod="openshift-marketplace/redhat-marketplace-nd6jd" Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.192309 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fc9d3a8-b3ed-4321-a724-f0df98e10736-catalog-content\") pod \"redhat-marketplace-nd6jd\" (UID: \"0fc9d3a8-b3ed-4321-a724-f0df98e10736\") " pod="openshift-marketplace/redhat-marketplace-nd6jd" Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.192528 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fc9d3a8-b3ed-4321-a724-f0df98e10736-utilities\") pod \"redhat-marketplace-nd6jd\" (UID: \"0fc9d3a8-b3ed-4321-a724-f0df98e10736\") " pod="openshift-marketplace/redhat-marketplace-nd6jd" Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.293842 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fc9d3a8-b3ed-4321-a724-f0df98e10736-utilities\") pod \"redhat-marketplace-nd6jd\" (UID: \"0fc9d3a8-b3ed-4321-a724-f0df98e10736\") " pod="openshift-marketplace/redhat-marketplace-nd6jd" Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.294231 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-smjbj\" (UniqueName: \"kubernetes.io/projected/0fc9d3a8-b3ed-4321-a724-f0df98e10736-kube-api-access-smjbj\") pod \"redhat-marketplace-nd6jd\" (UID: \"0fc9d3a8-b3ed-4321-a724-f0df98e10736\") " pod="openshift-marketplace/redhat-marketplace-nd6jd" Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 
10:45:35.294266 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fc9d3a8-b3ed-4321-a724-f0df98e10736-catalog-content\") pod \"redhat-marketplace-nd6jd\" (UID: \"0fc9d3a8-b3ed-4321-a724-f0df98e10736\") " pod="openshift-marketplace/redhat-marketplace-nd6jd" Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.294903 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fc9d3a8-b3ed-4321-a724-f0df98e10736-catalog-content\") pod \"redhat-marketplace-nd6jd\" (UID: \"0fc9d3a8-b3ed-4321-a724-f0df98e10736\") " pod="openshift-marketplace/redhat-marketplace-nd6jd" Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.295159 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fc9d3a8-b3ed-4321-a724-f0df98e10736-utilities\") pod \"redhat-marketplace-nd6jd\" (UID: \"0fc9d3a8-b3ed-4321-a724-f0df98e10736\") " pod="openshift-marketplace/redhat-marketplace-nd6jd" Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.326111 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-smjbj\" (UniqueName: \"kubernetes.io/projected/0fc9d3a8-b3ed-4321-a724-f0df98e10736-kube-api-access-smjbj\") pod \"redhat-marketplace-nd6jd\" (UID: \"0fc9d3a8-b3ed-4321-a724-f0df98e10736\") " pod="openshift-marketplace/redhat-marketplace-nd6jd" Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.404936 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nd6jd" Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.418962 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29490405-ljdkv" Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.487438 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qbjvl"] Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.505981 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-28bhm\" (UniqueName: \"kubernetes.io/projected/2d39e382-cc5b-4e9e-b66f-87fe37efbd4f-kube-api-access-28bhm\") pod \"2d39e382-cc5b-4e9e-b66f-87fe37efbd4f\" (UID: \"2d39e382-cc5b-4e9e-b66f-87fe37efbd4f\") " Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.506105 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2d39e382-cc5b-4e9e-b66f-87fe37efbd4f-config-volume\") pod \"2d39e382-cc5b-4e9e-b66f-87fe37efbd4f\" (UID: \"2d39e382-cc5b-4e9e-b66f-87fe37efbd4f\") " Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.506166 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2d39e382-cc5b-4e9e-b66f-87fe37efbd4f-secret-volume\") pod \"2d39e382-cc5b-4e9e-b66f-87fe37efbd4f\" (UID: \"2d39e382-cc5b-4e9e-b66f-87fe37efbd4f\") " Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.509819 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d39e382-cc5b-4e9e-b66f-87fe37efbd4f-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2d39e382-cc5b-4e9e-b66f-87fe37efbd4f" (UID: "2d39e382-cc5b-4e9e-b66f-87fe37efbd4f"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.510239 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d39e382-cc5b-4e9e-b66f-87fe37efbd4f-kube-api-access-28bhm" (OuterVolumeSpecName: "kube-api-access-28bhm") pod "2d39e382-cc5b-4e9e-b66f-87fe37efbd4f" (UID: "2d39e382-cc5b-4e9e-b66f-87fe37efbd4f"). InnerVolumeSpecName "kube-api-access-28bhm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.510321 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d39e382-cc5b-4e9e-b66f-87fe37efbd4f-config-volume" (OuterVolumeSpecName: "config-volume") pod "2d39e382-cc5b-4e9e-b66f-87fe37efbd4f" (UID: "2d39e382-cc5b-4e9e-b66f-87fe37efbd4f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:45:35 crc kubenswrapper[5003]: W0126 10:45:35.525240 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podafdef7f7_32b5_4976_881a_398dc09ac9bd.slice/crio-57800e056704c775f80bb31762939c6e47027c2abe7b65f3e52f07507ed965c1 WatchSource:0}: Error finding container 57800e056704c775f80bb31762939c6e47027c2abe7b65f3e52f07507ed965c1: Status 404 returned error can't find the container with id 57800e056704c775f80bb31762939c6e47027c2abe7b65f3e52f07507ed965c1 Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.551488 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-grvqx" Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.554611 5003 patch_prober.go:28] interesting pod/router-default-5444994796-grvqx container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 10:45:35 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld Jan 26 10:45:35 crc kubenswrapper[5003]: [+]process-running ok Jan 26 10:45:35 crc kubenswrapper[5003]: healthz check failed Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.554665 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-grvqx" podUID="3dc4301b-5dc4-4e39-a74b-9e46542e8dfb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.609096 5003 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2d39e382-cc5b-4e9e-b66f-87fe37efbd4f-config-volume\") on node \"crc\" DevicePath \"\"" Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.609459 5003 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2d39e382-cc5b-4e9e-b66f-87fe37efbd4f-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.609470 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-28bhm\" (UniqueName: \"kubernetes.io/projected/2d39e382-cc5b-4e9e-b66f-87fe37efbd4f-kube-api-access-28bhm\") on node \"crc\" DevicePath \"\"" Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.741467 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nd6jd"] Jan 26 10:45:35 crc kubenswrapper[5003]: W0126 
10:45:35.762189 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0fc9d3a8_b3ed_4321_a724_f0df98e10736.slice/crio-cebe25e20a515b7fa20c1de92e68ea1cff4b0cf7fecf04cde54dcca959f18682 WatchSource:0}: Error finding container cebe25e20a515b7fa20c1de92e68ea1cff4b0cf7fecf04cde54dcca959f18682: Status 404 returned error can't find the container with id cebe25e20a515b7fa20c1de92e68ea1cff4b0cf7fecf04cde54dcca959f18682 Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.856547 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7j6tj"] Jan 26 10:45:35 crc kubenswrapper[5003]: E0126 10:45:35.856746 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d39e382-cc5b-4e9e-b66f-87fe37efbd4f" containerName="collect-profiles" Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.856757 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d39e382-cc5b-4e9e-b66f-87fe37efbd4f" containerName="collect-profiles" Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.856862 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d39e382-cc5b-4e9e-b66f-87fe37efbd4f" containerName="collect-profiles" Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.857610 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7j6tj" Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.862078 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.866496 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7j6tj"] Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.886029 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-xsgg6" Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.914556 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/36a0e821-b752-4299-a9ec-1c719bdf5b2c-utilities\") pod \"redhat-operators-7j6tj\" (UID: \"36a0e821-b752-4299-a9ec-1c719bdf5b2c\") " pod="openshift-marketplace/redhat-operators-7j6tj" Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.915180 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/36a0e821-b752-4299-a9ec-1c719bdf5b2c-catalog-content\") pod \"redhat-operators-7j6tj\" (UID: \"36a0e821-b752-4299-a9ec-1c719bdf5b2c\") " pod="openshift-marketplace/redhat-operators-7j6tj" Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.915490 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lbq9\" (UniqueName: \"kubernetes.io/projected/36a0e821-b752-4299-a9ec-1c719bdf5b2c-kube-api-access-8lbq9\") pod \"redhat-operators-7j6tj\" (UID: \"36a0e821-b752-4299-a9ec-1c719bdf5b2c\") " pod="openshift-marketplace/redhat-operators-7j6tj" Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.952657 5003 patch_prober.go:28] interesting pod/downloads-7954f5f757-tn4sr container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.35:8080/\": dial tcp 10.217.0.35:8080: connect: connection 
refused" start-of-body= Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.952711 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-tn4sr" podUID="ab780a4c-5932-40c1-9383-f3d42238d2ac" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.35:8080/\": dial tcp 10.217.0.35:8080: connect: connection refused" Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.953449 5003 patch_prober.go:28] interesting pod/downloads-7954f5f757-tn4sr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.35:8080/\": dial tcp 10.217.0.35:8080: connect: connection refused" start-of-body= Jan 26 10:45:35 crc kubenswrapper[5003]: I0126 10:45:35.953517 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-tn4sr" podUID="ab780a4c-5932-40c1-9383-f3d42238d2ac" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.35:8080/\": dial tcp 10.217.0.35:8080: connect: connection refused" Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.017700 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/36a0e821-b752-4299-a9ec-1c719bdf5b2c-utilities\") pod \"redhat-operators-7j6tj\" (UID: \"36a0e821-b752-4299-a9ec-1c719bdf5b2c\") " pod="openshift-marketplace/redhat-operators-7j6tj" Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.017848 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/36a0e821-b752-4299-a9ec-1c719bdf5b2c-catalog-content\") pod \"redhat-operators-7j6tj\" (UID: \"36a0e821-b752-4299-a9ec-1c719bdf5b2c\") " pod="openshift-marketplace/redhat-operators-7j6tj" Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.017878 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lbq9\" (UniqueName: \"kubernetes.io/projected/36a0e821-b752-4299-a9ec-1c719bdf5b2c-kube-api-access-8lbq9\") pod \"redhat-operators-7j6tj\" (UID: \"36a0e821-b752-4299-a9ec-1c719bdf5b2c\") " pod="openshift-marketplace/redhat-operators-7j6tj" Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.018594 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/36a0e821-b752-4299-a9ec-1c719bdf5b2c-catalog-content\") pod \"redhat-operators-7j6tj\" (UID: \"36a0e821-b752-4299-a9ec-1c719bdf5b2c\") " pod="openshift-marketplace/redhat-operators-7j6tj" Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.018625 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/36a0e821-b752-4299-a9ec-1c719bdf5b2c-utilities\") pod \"redhat-operators-7j6tj\" (UID: \"36a0e821-b752-4299-a9ec-1c719bdf5b2c\") " pod="openshift-marketplace/redhat-operators-7j6tj" Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.039391 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lbq9\" (UniqueName: \"kubernetes.io/projected/36a0e821-b752-4299-a9ec-1c719bdf5b2c-kube-api-access-8lbq9\") pod \"redhat-operators-7j6tj\" (UID: \"36a0e821-b752-4299-a9ec-1c719bdf5b2c\") " pod="openshift-marketplace/redhat-operators-7j6tj" Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.083511 5003 generic.go:334] "Generic (PLEG): container finished" 
podID="8e5c341c-99a9-4d1d-a354-aa23722a8d13" containerID="d476c6470b60adeea87034f03b962e53d7ee869c338efc2267c34a1040ce39ec" exitCode=0 Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.083581 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"8e5c341c-99a9-4d1d-a354-aa23722a8d13","Type":"ContainerDied","Data":"d476c6470b60adeea87034f03b962e53d7ee869c338efc2267c34a1040ce39ec"} Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.088414 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29490405-ljdkv" event={"ID":"2d39e382-cc5b-4e9e-b66f-87fe37efbd4f","Type":"ContainerDied","Data":"3a16dc7ac700f8013a95f39ba181fd414c40515cfefcdba762b9545ec835a175"} Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.088449 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3a16dc7ac700f8013a95f39ba181fd414c40515cfefcdba762b9545ec835a175" Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.088532 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29490405-ljdkv" Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.106371 5003 generic.go:334] "Generic (PLEG): container finished" podID="afdef7f7-32b5-4976-881a-398dc09ac9bd" containerID="a026257d96006df22efee5a32457f8f9fae3faefbe47fa2b681eded786111c8c" exitCode=0 Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.106447 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qbjvl" event={"ID":"afdef7f7-32b5-4976-881a-398dc09ac9bd","Type":"ContainerDied","Data":"a026257d96006df22efee5a32457f8f9fae3faefbe47fa2b681eded786111c8c"} Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.106476 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qbjvl" event={"ID":"afdef7f7-32b5-4976-881a-398dc09ac9bd","Type":"ContainerStarted","Data":"57800e056704c775f80bb31762939c6e47027c2abe7b65f3e52f07507ed965c1"} Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.114584 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nd6jd" event={"ID":"0fc9d3a8-b3ed-4321-a724-f0df98e10736","Type":"ContainerStarted","Data":"74aa587e54ab82cf08024a21f59294d1b671957104ee2ede917ada84d0f8edb8"} Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.114655 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nd6jd" event={"ID":"0fc9d3a8-b3ed-4321-a724-f0df98e10736","Type":"ContainerStarted","Data":"cebe25e20a515b7fa20c1de92e68ea1cff4b0cf7fecf04cde54dcca959f18682"} Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.197878 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7j6tj" Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.273379 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mh5fg"] Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.274603 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mh5fg" Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.278921 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mh5fg"] Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.328037 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4378c6f-f796-4576-91b9-87a7ac43193e-utilities\") pod \"redhat-operators-mh5fg\" (UID: \"f4378c6f-f796-4576-91b9-87a7ac43193e\") " pod="openshift-marketplace/redhat-operators-mh5fg" Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.328084 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4378c6f-f796-4576-91b9-87a7ac43193e-catalog-content\") pod \"redhat-operators-mh5fg\" (UID: \"f4378c6f-f796-4576-91b9-87a7ac43193e\") " pod="openshift-marketplace/redhat-operators-mh5fg" Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.328112 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rtfn\" (UniqueName: \"kubernetes.io/projected/f4378c6f-f796-4576-91b9-87a7ac43193e-kube-api-access-6rtfn\") pod \"redhat-operators-mh5fg\" (UID: \"f4378c6f-f796-4576-91b9-87a7ac43193e\") " pod="openshift-marketplace/redhat-operators-mh5fg" Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.429208 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4378c6f-f796-4576-91b9-87a7ac43193e-utilities\") pod \"redhat-operators-mh5fg\" (UID: \"f4378c6f-f796-4576-91b9-87a7ac43193e\") " pod="openshift-marketplace/redhat-operators-mh5fg" Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.429549 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4378c6f-f796-4576-91b9-87a7ac43193e-catalog-content\") pod \"redhat-operators-mh5fg\" (UID: \"f4378c6f-f796-4576-91b9-87a7ac43193e\") " pod="openshift-marketplace/redhat-operators-mh5fg" Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.429616 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rtfn\" (UniqueName: \"kubernetes.io/projected/f4378c6f-f796-4576-91b9-87a7ac43193e-kube-api-access-6rtfn\") pod \"redhat-operators-mh5fg\" (UID: \"f4378c6f-f796-4576-91b9-87a7ac43193e\") " pod="openshift-marketplace/redhat-operators-mh5fg" Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.429867 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4378c6f-f796-4576-91b9-87a7ac43193e-utilities\") pod \"redhat-operators-mh5fg\" (UID: \"f4378c6f-f796-4576-91b9-87a7ac43193e\") " pod="openshift-marketplace/redhat-operators-mh5fg" Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.430116 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4378c6f-f796-4576-91b9-87a7ac43193e-catalog-content\") pod \"redhat-operators-mh5fg\" (UID: \"f4378c6f-f796-4576-91b9-87a7ac43193e\") " pod="openshift-marketplace/redhat-operators-mh5fg" Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.438900 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.458946 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rtfn\" (UniqueName: \"kubernetes.io/projected/f4378c6f-f796-4576-91b9-87a7ac43193e-kube-api-access-6rtfn\") pod \"redhat-operators-mh5fg\" (UID: \"f4378c6f-f796-4576-91b9-87a7ac43193e\") " pod="openshift-marketplace/redhat-operators-mh5fg" Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.531067 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4-kubelet-dir\") pod \"4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4\" (UID: \"4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4\") " Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.531192 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4-kube-api-access\") pod \"4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4\" (UID: \"4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4\") " Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.531184 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4" (UID: "4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.531509 5003 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.534218 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4" (UID: "4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.556261 5003 patch_prober.go:28] interesting pod/router-default-5444994796-grvqx container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 10:45:36 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld Jan 26 10:45:36 crc kubenswrapper[5003]: [+]process-running ok Jan 26 10:45:36 crc kubenswrapper[5003]: healthz check failed Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.556354 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-grvqx" podUID="3dc4301b-5dc4-4e39-a74b-9e46542e8dfb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.602381 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mh5fg" Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.632827 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 26 10:45:36 crc kubenswrapper[5003]: I0126 10:45:36.703621 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7j6tj"] Jan 26 10:45:37 crc kubenswrapper[5003]: I0126 10:45:37.085095 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mh5fg"] Jan 26 10:45:37 crc kubenswrapper[5003]: I0126 10:45:37.137246 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4","Type":"ContainerDied","Data":"909bf018cc4820b3d73b780dfa763d51140d82fca35b4965b2270b46f78c0fc5"} Jan 26 10:45:37 crc kubenswrapper[5003]: I0126 10:45:37.137324 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="909bf018cc4820b3d73b780dfa763d51140d82fca35b4965b2270b46f78c0fc5" Jan 26 10:45:37 crc kubenswrapper[5003]: I0126 10:45:37.137430 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 26 10:45:37 crc kubenswrapper[5003]: I0126 10:45:37.142175 5003 generic.go:334] "Generic (PLEG): container finished" podID="0fc9d3a8-b3ed-4321-a724-f0df98e10736" containerID="74aa587e54ab82cf08024a21f59294d1b671957104ee2ede917ada84d0f8edb8" exitCode=0 Jan 26 10:45:37 crc kubenswrapper[5003]: I0126 10:45:37.142262 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nd6jd" event={"ID":"0fc9d3a8-b3ed-4321-a724-f0df98e10736","Type":"ContainerDied","Data":"74aa587e54ab82cf08024a21f59294d1b671957104ee2ede917ada84d0f8edb8"} Jan 26 10:45:37 crc kubenswrapper[5003]: I0126 10:45:37.146424 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mh5fg" event={"ID":"f4378c6f-f796-4576-91b9-87a7ac43193e","Type":"ContainerStarted","Data":"b45d974fd0b9ec0d134a7a5f1a38cfb392e6197c9f832b4ce019d85ba6e567b6"} Jan 26 10:45:37 crc kubenswrapper[5003]: I0126 10:45:37.150073 5003 generic.go:334] "Generic (PLEG): container finished" podID="36a0e821-b752-4299-a9ec-1c719bdf5b2c" containerID="d914102da788edef2af168a55c3381d0e6c24fc8ca1bb32cef50fb16c14b4f9a" exitCode=0 Jan 26 10:45:37 crc kubenswrapper[5003]: I0126 10:45:37.151929 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7j6tj" event={"ID":"36a0e821-b752-4299-a9ec-1c719bdf5b2c","Type":"ContainerDied","Data":"d914102da788edef2af168a55c3381d0e6c24fc8ca1bb32cef50fb16c14b4f9a"} Jan 26 10:45:37 crc kubenswrapper[5003]: I0126 10:45:37.151978 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7j6tj" event={"ID":"36a0e821-b752-4299-a9ec-1c719bdf5b2c","Type":"ContainerStarted","Data":"5e464cd11cdbaec2dbe473b4287a74275d512878028272992f6c68d53cb3edc1"} Jan 26 10:45:37 crc kubenswrapper[5003]: I0126 10:45:37.559766 5003 patch_prober.go:28] interesting pod/router-default-5444994796-grvqx container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 10:45:37 crc 
kubenswrapper[5003]: [-]has-synced failed: reason withheld Jan 26 10:45:37 crc kubenswrapper[5003]: [+]process-running ok Jan 26 10:45:37 crc kubenswrapper[5003]: healthz check failed Jan 26 10:45:37 crc kubenswrapper[5003]: I0126 10:45:37.561681 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-grvqx" podUID="3dc4301b-5dc4-4e39-a74b-9e46542e8dfb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 10:45:37 crc kubenswrapper[5003]: I0126 10:45:37.619923 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 26 10:45:37 crc kubenswrapper[5003]: I0126 10:45:37.748911 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:37 crc kubenswrapper[5003]: I0126 10:45:37.754462 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-jjlwz" Jan 26 10:45:37 crc kubenswrapper[5003]: I0126 10:45:37.811575 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8e5c341c-99a9-4d1d-a354-aa23722a8d13-kube-api-access\") pod \"8e5c341c-99a9-4d1d-a354-aa23722a8d13\" (UID: \"8e5c341c-99a9-4d1d-a354-aa23722a8d13\") " Jan 26 10:45:37 crc kubenswrapper[5003]: I0126 10:45:37.811649 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8e5c341c-99a9-4d1d-a354-aa23722a8d13-kubelet-dir\") pod \"8e5c341c-99a9-4d1d-a354-aa23722a8d13\" (UID: \"8e5c341c-99a9-4d1d-a354-aa23722a8d13\") " Jan 26 10:45:37 crc kubenswrapper[5003]: I0126 10:45:37.812248 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8e5c341c-99a9-4d1d-a354-aa23722a8d13-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "8e5c341c-99a9-4d1d-a354-aa23722a8d13" (UID: "8e5c341c-99a9-4d1d-a354-aa23722a8d13"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 10:45:37 crc kubenswrapper[5003]: I0126 10:45:37.835751 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e5c341c-99a9-4d1d-a354-aa23722a8d13-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "8e5c341c-99a9-4d1d-a354-aa23722a8d13" (UID: "8e5c341c-99a9-4d1d-a354-aa23722a8d13"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:45:37 crc kubenswrapper[5003]: I0126 10:45:37.913622 5003 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8e5c341c-99a9-4d1d-a354-aa23722a8d13-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 26 10:45:37 crc kubenswrapper[5003]: I0126 10:45:37.913659 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8e5c341c-99a9-4d1d-a354-aa23722a8d13-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 26 10:45:38 crc kubenswrapper[5003]: I0126 10:45:38.186509 5003 generic.go:334] "Generic (PLEG): container finished" podID="f4378c6f-f796-4576-91b9-87a7ac43193e" containerID="ec1dee5d89d40e0ab371053534a262904a1ecfebfcb931b87d5b9f9b21f87a24" exitCode=0 Jan 26 10:45:38 crc kubenswrapper[5003]: I0126 10:45:38.186588 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mh5fg" event={"ID":"f4378c6f-f796-4576-91b9-87a7ac43193e","Type":"ContainerDied","Data":"ec1dee5d89d40e0ab371053534a262904a1ecfebfcb931b87d5b9f9b21f87a24"} Jan 26 10:45:38 crc kubenswrapper[5003]: I0126 10:45:38.193244 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"8e5c341c-99a9-4d1d-a354-aa23722a8d13","Type":"ContainerDied","Data":"18f84b74cc016e376793000a5a541e666e0b7e6d7441ee2547ac1f6bf11acc5a"} Jan 26 10:45:38 crc kubenswrapper[5003]: I0126 10:45:38.193298 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="18f84b74cc016e376793000a5a541e666e0b7e6d7441ee2547ac1f6bf11acc5a" Jan 26 10:45:38 crc kubenswrapper[5003]: I0126 10:45:38.193662 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 26 10:45:38 crc kubenswrapper[5003]: I0126 10:45:38.554331 5003 patch_prober.go:28] interesting pod/router-default-5444994796-grvqx container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 10:45:38 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld Jan 26 10:45:38 crc kubenswrapper[5003]: [+]process-running ok Jan 26 10:45:38 crc kubenswrapper[5003]: healthz check failed Jan 26 10:45:38 crc kubenswrapper[5003]: I0126 10:45:38.554405 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-grvqx" podUID="3dc4301b-5dc4-4e39-a74b-9e46542e8dfb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 10:45:39 crc kubenswrapper[5003]: I0126 10:45:39.040198 5003 patch_prober.go:28] interesting pod/machine-config-daemon-m84kp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 10:45:39 crc kubenswrapper[5003]: I0126 10:45:39.040247 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 10:45:39 crc kubenswrapper[5003]: I0126 10:45:39.553092 5003 patch_prober.go:28] interesting pod/router-default-5444994796-grvqx container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 10:45:39 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld Jan 26 10:45:39 crc kubenswrapper[5003]: [+]process-running ok Jan 26 10:45:39 crc kubenswrapper[5003]: healthz check failed Jan 26 10:45:39 crc kubenswrapper[5003]: I0126 10:45:39.553405 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-grvqx" podUID="3dc4301b-5dc4-4e39-a74b-9e46542e8dfb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 10:45:40 crc kubenswrapper[5003]: I0126 10:45:40.553441 5003 patch_prober.go:28] interesting pod/router-default-5444994796-grvqx container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 10:45:40 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld Jan 26 10:45:40 crc kubenswrapper[5003]: [+]process-running ok Jan 26 10:45:40 crc kubenswrapper[5003]: healthz check failed Jan 26 10:45:40 crc kubenswrapper[5003]: I0126 10:45:40.553631 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-grvqx" podUID="3dc4301b-5dc4-4e39-a74b-9e46542e8dfb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 10:45:40 crc kubenswrapper[5003]: I0126 10:45:40.694088 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-28mrx" Jan 26 10:45:41 crc kubenswrapper[5003]: I0126 
10:45:41.554681 5003 patch_prober.go:28] interesting pod/router-default-5444994796-grvqx container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 10:45:41 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld Jan 26 10:45:41 crc kubenswrapper[5003]: [+]process-running ok Jan 26 10:45:41 crc kubenswrapper[5003]: healthz check failed Jan 26 10:45:41 crc kubenswrapper[5003]: I0126 10:45:41.554750 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-grvqx" podUID="3dc4301b-5dc4-4e39-a74b-9e46542e8dfb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 10:45:42 crc kubenswrapper[5003]: I0126 10:45:42.552900 5003 patch_prober.go:28] interesting pod/router-default-5444994796-grvqx container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 10:45:42 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld Jan 26 10:45:42 crc kubenswrapper[5003]: [+]process-running ok Jan 26 10:45:42 crc kubenswrapper[5003]: healthz check failed Jan 26 10:45:42 crc kubenswrapper[5003]: I0126 10:45:42.553208 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-grvqx" podUID="3dc4301b-5dc4-4e39-a74b-9e46542e8dfb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 10:45:42 crc kubenswrapper[5003]: I0126 10:45:42.663099 5003 patch_prober.go:28] interesting pod/console-f9d7485db-cpxlv container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.18:8443/health\": dial tcp 10.217.0.18:8443: connect: connection refused" start-of-body= Jan 26 10:45:42 crc kubenswrapper[5003]: I0126 10:45:42.663173 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-cpxlv" podUID="a8192c61-0b99-47a2-af6c-aee9eff089f1" containerName="console" probeResult="failure" output="Get \"https://10.217.0.18:8443/health\": dial tcp 10.217.0.18:8443: connect: connection refused" Jan 26 10:45:43 crc kubenswrapper[5003]: I0126 10:45:43.552562 5003 patch_prober.go:28] interesting pod/router-default-5444994796-grvqx container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 10:45:43 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld Jan 26 10:45:43 crc kubenswrapper[5003]: [+]process-running ok Jan 26 10:45:43 crc kubenswrapper[5003]: healthz check failed Jan 26 10:45:43 crc kubenswrapper[5003]: I0126 10:45:43.552619 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-grvqx" podUID="3dc4301b-5dc4-4e39-a74b-9e46542e8dfb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 10:45:44 crc kubenswrapper[5003]: I0126 10:45:44.553631 5003 patch_prober.go:28] interesting pod/router-default-5444994796-grvqx container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 10:45:44 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld Jan 26 10:45:44 crc 
kubenswrapper[5003]: [+]process-running ok Jan 26 10:45:44 crc kubenswrapper[5003]: healthz check failed Jan 26 10:45:44 crc kubenswrapper[5003]: I0126 10:45:44.554087 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-grvqx" podUID="3dc4301b-5dc4-4e39-a74b-9e46542e8dfb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 10:45:45 crc kubenswrapper[5003]: I0126 10:45:45.553135 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-grvqx" Jan 26 10:45:45 crc kubenswrapper[5003]: I0126 10:45:45.555003 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-grvqx" Jan 26 10:45:45 crc kubenswrapper[5003]: I0126 10:45:45.967606 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-tn4sr" Jan 26 10:45:47 crc kubenswrapper[5003]: I0126 10:45:47.257458 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/aa06185d-fe5e-423a-b5a7-19e8bb7c8a60-metrics-certs\") pod \"network-metrics-daemon-4jrnq\" (UID: \"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60\") " pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:45:47 crc kubenswrapper[5003]: I0126 10:45:47.268022 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/aa06185d-fe5e-423a-b5a7-19e8bb7c8a60-metrics-certs\") pod \"network-metrics-daemon-4jrnq\" (UID: \"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60\") " pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:45:47 crc kubenswrapper[5003]: I0126 10:45:47.388098 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-4jrnq" Jan 26 10:45:52 crc kubenswrapper[5003]: I0126 10:45:52.666265 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-cpxlv" Jan 26 10:45:52 crc kubenswrapper[5003]: I0126 10:45:52.669696 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-cpxlv" Jan 26 10:45:52 crc kubenswrapper[5003]: I0126 10:45:52.749050 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:46:05 crc kubenswrapper[5003]: I0126 10:46:05.587681 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4n9nm" Jan 26 10:46:09 crc kubenswrapper[5003]: I0126 10:46:09.040920 5003 patch_prober.go:28] interesting pod/machine-config-daemon-m84kp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 10:46:09 crc kubenswrapper[5003]: I0126 10:46:09.042228 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 10:46:11 crc kubenswrapper[5003]: I0126 10:46:11.469997 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 26 10:46:11 crc kubenswrapper[5003]: E0126 10:46:11.470327 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4" containerName="pruner" Jan 26 10:46:11 crc kubenswrapper[5003]: I0126 10:46:11.470340 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4" containerName="pruner" Jan 26 10:46:11 crc kubenswrapper[5003]: E0126 10:46:11.470351 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e5c341c-99a9-4d1d-a354-aa23722a8d13" containerName="pruner" Jan 26 10:46:11 crc kubenswrapper[5003]: I0126 10:46:11.470358 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e5c341c-99a9-4d1d-a354-aa23722a8d13" containerName="pruner" Jan 26 10:46:11 crc kubenswrapper[5003]: I0126 10:46:11.470466 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="4cda18bc-d2dd-4b7e-9f15-1a47f2e4dbf4" containerName="pruner" Jan 26 10:46:11 crc kubenswrapper[5003]: I0126 10:46:11.470487 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e5c341c-99a9-4d1d-a354-aa23722a8d13" containerName="pruner" Jan 26 10:46:11 crc kubenswrapper[5003]: I0126 10:46:11.470866 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 26 10:46:11 crc kubenswrapper[5003]: I0126 10:46:11.480347 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 26 10:46:11 crc kubenswrapper[5003]: I0126 10:46:11.480628 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 26 10:46:11 crc kubenswrapper[5003]: I0126 10:46:11.481039 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 26 10:46:11 crc kubenswrapper[5003]: I0126 10:46:11.580102 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c61dfae0-67ed-41c4-891d-dbc0bca2ce7a-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c61dfae0-67ed-41c4-891d-dbc0bca2ce7a\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 26 10:46:11 crc kubenswrapper[5003]: I0126 10:46:11.580179 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c61dfae0-67ed-41c4-891d-dbc0bca2ce7a-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c61dfae0-67ed-41c4-891d-dbc0bca2ce7a\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 26 10:46:11 crc kubenswrapper[5003]: I0126 10:46:11.682157 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c61dfae0-67ed-41c4-891d-dbc0bca2ce7a-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c61dfae0-67ed-41c4-891d-dbc0bca2ce7a\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 26 10:46:11 crc kubenswrapper[5003]: I0126 10:46:11.682260 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c61dfae0-67ed-41c4-891d-dbc0bca2ce7a-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c61dfae0-67ed-41c4-891d-dbc0bca2ce7a\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 26 10:46:11 crc kubenswrapper[5003]: I0126 10:46:11.682321 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c61dfae0-67ed-41c4-891d-dbc0bca2ce7a-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c61dfae0-67ed-41c4-891d-dbc0bca2ce7a\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 26 10:46:11 crc kubenswrapper[5003]: I0126 10:46:11.710225 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c61dfae0-67ed-41c4-891d-dbc0bca2ce7a-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c61dfae0-67ed-41c4-891d-dbc0bca2ce7a\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 26 10:46:11 crc kubenswrapper[5003]: I0126 10:46:11.800721 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 26 10:46:12 crc kubenswrapper[5003]: I0126 10:46:12.399251 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 10:46:14 crc kubenswrapper[5003]: E0126 10:46:14.545695 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 26 10:46:14 crc kubenswrapper[5003]: E0126 10:46:14.545945 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5shrs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-xfvlh_openshift-marketplace(c837509e-b233-4ed4-9c00-49f01de19953): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 26 10:46:14 crc kubenswrapper[5003]: E0126 10:46:14.547351 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-xfvlh" podUID="c837509e-b233-4ed4-9c00-49f01de19953" Jan 26 10:46:15 crc kubenswrapper[5003]: I0126 10:46:15.459701 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 26 10:46:15 crc kubenswrapper[5003]: I0126 10:46:15.460608 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 26 10:46:15 crc kubenswrapper[5003]: I0126 10:46:15.467990 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 26 10:46:15 crc kubenswrapper[5003]: I0126 10:46:15.629888 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5eaf430b-7e00-4049-8e4a-afb533141643-kube-api-access\") pod \"installer-9-crc\" (UID: \"5eaf430b-7e00-4049-8e4a-afb533141643\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 26 10:46:15 crc kubenswrapper[5003]: I0126 10:46:15.630003 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5eaf430b-7e00-4049-8e4a-afb533141643-kubelet-dir\") pod \"installer-9-crc\" (UID: \"5eaf430b-7e00-4049-8e4a-afb533141643\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 26 10:46:15 crc kubenswrapper[5003]: I0126 10:46:15.630035 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/5eaf430b-7e00-4049-8e4a-afb533141643-var-lock\") pod \"installer-9-crc\" (UID: \"5eaf430b-7e00-4049-8e4a-afb533141643\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 26 10:46:15 crc kubenswrapper[5003]: I0126 10:46:15.730708 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5eaf430b-7e00-4049-8e4a-afb533141643-kubelet-dir\") pod \"installer-9-crc\" (UID: \"5eaf430b-7e00-4049-8e4a-afb533141643\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 26 10:46:15 crc kubenswrapper[5003]: I0126 10:46:15.730761 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/5eaf430b-7e00-4049-8e4a-afb533141643-var-lock\") pod \"installer-9-crc\" (UID: \"5eaf430b-7e00-4049-8e4a-afb533141643\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 26 10:46:15 crc kubenswrapper[5003]: I0126 10:46:15.730811 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5eaf430b-7e00-4049-8e4a-afb533141643-kubelet-dir\") pod \"installer-9-crc\" (UID: \"5eaf430b-7e00-4049-8e4a-afb533141643\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 26 10:46:15 crc kubenswrapper[5003]: I0126 10:46:15.730849 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5eaf430b-7e00-4049-8e4a-afb533141643-kube-api-access\") pod \"installer-9-crc\" (UID: \"5eaf430b-7e00-4049-8e4a-afb533141643\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 26 10:46:15 crc kubenswrapper[5003]: I0126 10:46:15.730865 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/5eaf430b-7e00-4049-8e4a-afb533141643-var-lock\") pod \"installer-9-crc\" (UID: \"5eaf430b-7e00-4049-8e4a-afb533141643\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 26 10:46:15 crc kubenswrapper[5003]: I0126 10:46:15.747878 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5eaf430b-7e00-4049-8e4a-afb533141643-kube-api-access\") pod \"installer-9-crc\" (UID: 
\"5eaf430b-7e00-4049-8e4a-afb533141643\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 26 10:46:15 crc kubenswrapper[5003]: I0126 10:46:15.792783 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 26 10:46:15 crc kubenswrapper[5003]: E0126 10:46:15.854594 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-xfvlh" podUID="c837509e-b233-4ed4-9c00-49f01de19953" Jan 26 10:46:15 crc kubenswrapper[5003]: E0126 10:46:15.931902 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 26 10:46:15 crc kubenswrapper[5003]: E0126 10:46:15.932086 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-n7n25,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-7sz59_openshift-marketplace(259b94cb-d033-4789-a0b6-dbfc3a361d0a): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 26 10:46:15 crc kubenswrapper[5003]: E0126 10:46:15.934028 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-7sz59" podUID="259b94cb-d033-4789-a0b6-dbfc3a361d0a" Jan 26 10:46:15 crc kubenswrapper[5003]: E0126 10:46:15.942849 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" 
image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 26 10:46:15 crc kubenswrapper[5003]: E0126 10:46:15.943031 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fqnqd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-nlxw6_openshift-marketplace(a6f195f7-8805-422e-b316-c57c71a27a38): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 26 10:46:15 crc kubenswrapper[5003]: E0126 10:46:15.944232 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-nlxw6" podUID="a6f195f7-8805-422e-b316-c57c71a27a38" Jan 26 10:46:19 crc kubenswrapper[5003]: E0126 10:46:19.005022 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-7sz59" podUID="259b94cb-d033-4789-a0b6-dbfc3a361d0a" Jan 26 10:46:19 crc kubenswrapper[5003]: E0126 10:46:19.005253 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-nlxw6" podUID="a6f195f7-8805-422e-b316-c57c71a27a38" Jan 26 10:46:19 crc kubenswrapper[5003]: E0126 10:46:19.099844 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 26 10:46:19 crc kubenswrapper[5003]: E0126 10:46:19.100016 5003 
kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8lbq9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-7j6tj_openshift-marketplace(36a0e821-b752-4299-a9ec-1c719bdf5b2c): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 26 10:46:19 crc kubenswrapper[5003]: E0126 10:46:19.101305 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-7j6tj" podUID="36a0e821-b752-4299-a9ec-1c719bdf5b2c" Jan 26 10:46:20 crc kubenswrapper[5003]: E0126 10:46:20.410020 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-7j6tj" podUID="36a0e821-b752-4299-a9ec-1c719bdf5b2c" Jan 26 10:46:20 crc kubenswrapper[5003]: E0126 10:46:20.487459 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 26 10:46:20 crc kubenswrapper[5003]: E0126 10:46:20.487940 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ksvrq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-qbjvl_openshift-marketplace(afdef7f7-32b5-4976-881a-398dc09ac9bd): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 26 10:46:20 crc kubenswrapper[5003]: E0126 10:46:20.491374 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-qbjvl" podUID="afdef7f7-32b5-4976-881a-398dc09ac9bd" Jan 26 10:46:20 crc kubenswrapper[5003]: E0126 10:46:20.501908 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 26 10:46:20 crc kubenswrapper[5003]: E0126 10:46:20.502084 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-smjbj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-nd6jd_openshift-marketplace(0fc9d3a8-b3ed-4321-a724-f0df98e10736): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 26 10:46:20 crc kubenswrapper[5003]: E0126 10:46:20.503462 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-nd6jd" podUID="0fc9d3a8-b3ed-4321-a724-f0df98e10736" Jan 26 10:46:20 crc kubenswrapper[5003]: E0126 10:46:20.568516 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 26 10:46:20 crc kubenswrapper[5003]: E0126 10:46:20.569064 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9ckvh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-4nzsj_openshift-marketplace(056db06f-766d-4393-87b8-4148b3f4c3c9): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 26 10:46:20 crc kubenswrapper[5003]: E0126 10:46:20.570241 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-4nzsj" podUID="056db06f-766d-4393-87b8-4148b3f4c3c9" Jan 26 10:46:20 crc kubenswrapper[5003]: E0126 10:46:20.605988 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 26 10:46:20 crc kubenswrapper[5003]: E0126 10:46:20.606113 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6rtfn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-mh5fg_openshift-marketplace(f4378c6f-f796-4576-91b9-87a7ac43193e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 26 10:46:20 crc kubenswrapper[5003]: E0126 10:46:20.607438 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-mh5fg" podUID="f4378c6f-f796-4576-91b9-87a7ac43193e" Jan 26 10:46:20 crc kubenswrapper[5003]: I0126 10:46:20.867531 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-4jrnq"] Jan 26 10:46:20 crc kubenswrapper[5003]: W0126 10:46:20.874524 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaa06185d_fe5e_423a_b5a7_19e8bb7c8a60.slice/crio-489c28471131cc7c54c7cd9abb140d3dca845db280ef1f73ea2ed027ebfe3e60 WatchSource:0}: Error finding container 489c28471131cc7c54c7cd9abb140d3dca845db280ef1f73ea2ed027ebfe3e60: Status 404 returned error can't find the container with id 489c28471131cc7c54c7cd9abb140d3dca845db280ef1f73ea2ed027ebfe3e60 Jan 26 10:46:20 crc kubenswrapper[5003]: I0126 10:46:20.966710 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 26 10:46:20 crc kubenswrapper[5003]: I0126 10:46:20.970326 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 26 10:46:20 crc kubenswrapper[5003]: W0126 10:46:20.979974 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podc61dfae0_67ed_41c4_891d_dbc0bca2ce7a.slice/crio-bff6cefc989a64a6e7da3a304a0fa75662539300e395c63c7336f7199a1daa26 WatchSource:0}: Error finding container bff6cefc989a64a6e7da3a304a0fa75662539300e395c63c7336f7199a1daa26: Status 404 returned error can't find the container with id bff6cefc989a64a6e7da3a304a0fa75662539300e395c63c7336f7199a1daa26 Jan 26 10:46:21 crc 
kubenswrapper[5003]: I0126 10:46:21.444600 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"c61dfae0-67ed-41c4-891d-dbc0bca2ce7a","Type":"ContainerStarted","Data":"f5c5c3721901cff2743a2d97ba919806e90a85571c578aea1d3ed10d5b1096ca"} Jan 26 10:46:21 crc kubenswrapper[5003]: I0126 10:46:21.444924 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"c61dfae0-67ed-41c4-891d-dbc0bca2ce7a","Type":"ContainerStarted","Data":"bff6cefc989a64a6e7da3a304a0fa75662539300e395c63c7336f7199a1daa26"} Jan 26 10:46:21 crc kubenswrapper[5003]: I0126 10:46:21.445943 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"5eaf430b-7e00-4049-8e4a-afb533141643","Type":"ContainerStarted","Data":"ae1d5075a04894decd96a49315da7c3c66965eafdc64b55263d61660b4fa37c3"} Jan 26 10:46:21 crc kubenswrapper[5003]: I0126 10:46:21.445977 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"5eaf430b-7e00-4049-8e4a-afb533141643","Type":"ContainerStarted","Data":"851d923cdf4e09260900907c4f2efbf5c500554352a24e8044df052f51b96559"} Jan 26 10:46:21 crc kubenswrapper[5003]: I0126 10:46:21.447690 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-4jrnq" event={"ID":"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60","Type":"ContainerStarted","Data":"5db6ee16791f53253d78f410e2a79a06744ea77dfcf8e10c63cd9fd6384fc95f"} Jan 26 10:46:21 crc kubenswrapper[5003]: I0126 10:46:21.447735 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-4jrnq" event={"ID":"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60","Type":"ContainerStarted","Data":"5588ea21be4c2491a22e8586e82fdd7fc86a9ddbf7e0747ff1ae3b6788209be7"} Jan 26 10:46:21 crc kubenswrapper[5003]: I0126 10:46:21.447750 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-4jrnq" event={"ID":"aa06185d-fe5e-423a-b5a7-19e8bb7c8a60","Type":"ContainerStarted","Data":"489c28471131cc7c54c7cd9abb140d3dca845db280ef1f73ea2ed027ebfe3e60"} Jan 26 10:46:21 crc kubenswrapper[5003]: E0126 10:46:21.449274 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-mh5fg" podUID="f4378c6f-f796-4576-91b9-87a7ac43193e" Jan 26 10:46:21 crc kubenswrapper[5003]: E0126 10:46:21.449745 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-qbjvl" podUID="afdef7f7-32b5-4976-881a-398dc09ac9bd" Jan 26 10:46:21 crc kubenswrapper[5003]: E0126 10:46:21.450650 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-4nzsj" podUID="056db06f-766d-4393-87b8-4148b3f4c3c9" Jan 26 10:46:21 crc kubenswrapper[5003]: E0126 10:46:21.450942 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-nd6jd" podUID="0fc9d3a8-b3ed-4321-a724-f0df98e10736" Jan 26 10:46:21 crc kubenswrapper[5003]: I0126 10:46:21.459424 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=10.459407205 podStartE2EDuration="10.459407205s" podCreationTimestamp="2026-01-26 10:46:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:46:21.455198723 +0000 UTC m=+196.996424304" watchObservedRunningTime="2026-01-26 10:46:21.459407205 +0000 UTC m=+197.000632766" Jan 26 10:46:21 crc kubenswrapper[5003]: I0126 10:46:21.531410 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=6.53139505 podStartE2EDuration="6.53139505s" podCreationTimestamp="2026-01-26 10:46:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:46:21.525109627 +0000 UTC m=+197.066335188" watchObservedRunningTime="2026-01-26 10:46:21.53139505 +0000 UTC m=+197.072620611" Jan 26 10:46:21 crc kubenswrapper[5003]: I0126 10:46:21.557321 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-4jrnq" podStartSLOduration=177.557297284 podStartE2EDuration="2m57.557297284s" podCreationTimestamp="2026-01-26 10:43:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:46:21.553141283 +0000 UTC m=+197.094366854" watchObservedRunningTime="2026-01-26 10:46:21.557297284 +0000 UTC m=+197.098522845" Jan 26 10:46:22 crc kubenswrapper[5003]: I0126 10:46:22.457681 5003 generic.go:334] "Generic (PLEG): container finished" podID="c61dfae0-67ed-41c4-891d-dbc0bca2ce7a" containerID="f5c5c3721901cff2743a2d97ba919806e90a85571c578aea1d3ed10d5b1096ca" exitCode=0 Jan 26 10:46:22 crc kubenswrapper[5003]: I0126 10:46:22.458699 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"c61dfae0-67ed-41c4-891d-dbc0bca2ce7a","Type":"ContainerDied","Data":"f5c5c3721901cff2743a2d97ba919806e90a85571c578aea1d3ed10d5b1096ca"} Jan 26 10:46:22 crc kubenswrapper[5003]: I0126 10:46:22.998332 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-22wlq"] Jan 26 10:46:23 crc kubenswrapper[5003]: I0126 10:46:23.737998 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 26 10:46:23 crc kubenswrapper[5003]: I0126 10:46:23.855321 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c61dfae0-67ed-41c4-891d-dbc0bca2ce7a-kubelet-dir\") pod \"c61dfae0-67ed-41c4-891d-dbc0bca2ce7a\" (UID: \"c61dfae0-67ed-41c4-891d-dbc0bca2ce7a\") " Jan 26 10:46:23 crc kubenswrapper[5003]: I0126 10:46:23.855405 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c61dfae0-67ed-41c4-891d-dbc0bca2ce7a-kube-api-access\") pod \"c61dfae0-67ed-41c4-891d-dbc0bca2ce7a\" (UID: \"c61dfae0-67ed-41c4-891d-dbc0bca2ce7a\") " Jan 26 10:46:23 crc kubenswrapper[5003]: I0126 10:46:23.855444 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c61dfae0-67ed-41c4-891d-dbc0bca2ce7a-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "c61dfae0-67ed-41c4-891d-dbc0bca2ce7a" (UID: "c61dfae0-67ed-41c4-891d-dbc0bca2ce7a"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 10:46:23 crc kubenswrapper[5003]: I0126 10:46:23.855584 5003 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c61dfae0-67ed-41c4-891d-dbc0bca2ce7a-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 26 10:46:23 crc kubenswrapper[5003]: I0126 10:46:23.863422 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c61dfae0-67ed-41c4-891d-dbc0bca2ce7a-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "c61dfae0-67ed-41c4-891d-dbc0bca2ce7a" (UID: "c61dfae0-67ed-41c4-891d-dbc0bca2ce7a"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:46:23 crc kubenswrapper[5003]: I0126 10:46:23.956971 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c61dfae0-67ed-41c4-891d-dbc0bca2ce7a-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 26 10:46:24 crc kubenswrapper[5003]: I0126 10:46:24.475988 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"c61dfae0-67ed-41c4-891d-dbc0bca2ce7a","Type":"ContainerDied","Data":"bff6cefc989a64a6e7da3a304a0fa75662539300e395c63c7336f7199a1daa26"} Jan 26 10:46:24 crc kubenswrapper[5003]: I0126 10:46:24.476024 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bff6cefc989a64a6e7da3a304a0fa75662539300e395c63c7336f7199a1daa26" Jan 26 10:46:24 crc kubenswrapper[5003]: I0126 10:46:24.476070 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 26 10:46:31 crc kubenswrapper[5003]: I0126 10:46:31.514998 5003 generic.go:334] "Generic (PLEG): container finished" podID="c837509e-b233-4ed4-9c00-49f01de19953" containerID="50da230bc4280d27faceae10386437555e2f964ca32798be11f0a8060e3605dc" exitCode=0 Jan 26 10:46:31 crc kubenswrapper[5003]: I0126 10:46:31.515087 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xfvlh" event={"ID":"c837509e-b233-4ed4-9c00-49f01de19953","Type":"ContainerDied","Data":"50da230bc4280d27faceae10386437555e2f964ca32798be11f0a8060e3605dc"} Jan 26 10:46:32 crc kubenswrapper[5003]: I0126 10:46:32.522573 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xfvlh" event={"ID":"c837509e-b233-4ed4-9c00-49f01de19953","Type":"ContainerStarted","Data":"eb2b2af639f0cd69ebc8ed4f68b5d3d62cfae95bd194307b0e305f47da148809"} Jan 26 10:46:32 crc kubenswrapper[5003]: I0126 10:46:32.524605 5003 generic.go:334] "Generic (PLEG): container finished" podID="a6f195f7-8805-422e-b316-c57c71a27a38" containerID="330c394ad929b6aa35e1e51ad90b232053d097ef196cef80686959bd0b283db5" exitCode=0 Jan 26 10:46:32 crc kubenswrapper[5003]: I0126 10:46:32.524675 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nlxw6" event={"ID":"a6f195f7-8805-422e-b316-c57c71a27a38","Type":"ContainerDied","Data":"330c394ad929b6aa35e1e51ad90b232053d097ef196cef80686959bd0b283db5"} Jan 26 10:46:32 crc kubenswrapper[5003]: I0126 10:46:32.541906 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xfvlh" podStartSLOduration=2.727871413 podStartE2EDuration="59.54188921s" podCreationTimestamp="2026-01-26 10:45:33 +0000 UTC" firstStartedPulling="2026-01-26 10:45:35.064531928 +0000 UTC m=+150.605757489" lastFinishedPulling="2026-01-26 10:46:31.878549725 +0000 UTC m=+207.419775286" observedRunningTime="2026-01-26 10:46:32.540549751 +0000 UTC m=+208.081775322" watchObservedRunningTime="2026-01-26 10:46:32.54188921 +0000 UTC m=+208.083114771" Jan 26 10:46:33 crc kubenswrapper[5003]: I0126 10:46:33.425174 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xfvlh" Jan 26 10:46:33 crc kubenswrapper[5003]: I0126 10:46:33.425515 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xfvlh" Jan 26 10:46:33 crc kubenswrapper[5003]: I0126 10:46:33.531637 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nlxw6" event={"ID":"a6f195f7-8805-422e-b316-c57c71a27a38","Type":"ContainerStarted","Data":"f960fad4688dcbdabdd80b05e3504bccf43eb7a616f36b3c2a19b08c5fe26c0f"} Jan 26 10:46:33 crc kubenswrapper[5003]: I0126 10:46:33.533670 5003 generic.go:334] "Generic (PLEG): container finished" podID="259b94cb-d033-4789-a0b6-dbfc3a361d0a" containerID="1d4bee08acaf34d356de7f26a3ba7cf72ba15f775df999adf0b7db7e63961e5f" exitCode=0 Jan 26 10:46:33 crc kubenswrapper[5003]: I0126 10:46:33.533720 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7sz59" event={"ID":"259b94cb-d033-4789-a0b6-dbfc3a361d0a","Type":"ContainerDied","Data":"1d4bee08acaf34d356de7f26a3ba7cf72ba15f775df999adf0b7db7e63961e5f"} Jan 26 10:46:33 crc kubenswrapper[5003]: I0126 10:46:33.555384 5003 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nlxw6" podStartSLOduration=2.458439588 podStartE2EDuration="1m1.555366514s" podCreationTimestamp="2026-01-26 10:45:32 +0000 UTC" firstStartedPulling="2026-01-26 10:45:33.910220434 +0000 UTC m=+149.451445995" lastFinishedPulling="2026-01-26 10:46:33.00714736 +0000 UTC m=+208.548372921" observedRunningTime="2026-01-26 10:46:33.552864221 +0000 UTC m=+209.094089792" watchObservedRunningTime="2026-01-26 10:46:33.555366514 +0000 UTC m=+209.096592075" Jan 26 10:46:34 crc kubenswrapper[5003]: I0126 10:46:34.521251 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-xfvlh" podUID="c837509e-b233-4ed4-9c00-49f01de19953" containerName="registry-server" probeResult="failure" output=< Jan 26 10:46:34 crc kubenswrapper[5003]: timeout: failed to connect service ":50051" within 1s Jan 26 10:46:34 crc kubenswrapper[5003]: > Jan 26 10:46:34 crc kubenswrapper[5003]: I0126 10:46:34.570087 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7j6tj" event={"ID":"36a0e821-b752-4299-a9ec-1c719bdf5b2c","Type":"ContainerStarted","Data":"61337665437faac32f969edb14e8efbfa5d1b813c7ed6ddb3c24669358d26eee"} Jan 26 10:46:34 crc kubenswrapper[5003]: I0126 10:46:34.573744 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7sz59" event={"ID":"259b94cb-d033-4789-a0b6-dbfc3a361d0a","Type":"ContainerStarted","Data":"886e7190eceb97ca48af46167911abbb583fa330498d831795713a992fbd8681"} Jan 26 10:46:34 crc kubenswrapper[5003]: I0126 10:46:34.576484 5003 generic.go:334] "Generic (PLEG): container finished" podID="f4378c6f-f796-4576-91b9-87a7ac43193e" containerID="c29b0046d470ccce81ed26ac116532e3a0431209fd407a3c24ecebe3d26a06d4" exitCode=0 Jan 26 10:46:34 crc kubenswrapper[5003]: I0126 10:46:34.576526 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mh5fg" event={"ID":"f4378c6f-f796-4576-91b9-87a7ac43193e","Type":"ContainerDied","Data":"c29b0046d470ccce81ed26ac116532e3a0431209fd407a3c24ecebe3d26a06d4"} Jan 26 10:46:34 crc kubenswrapper[5003]: I0126 10:46:34.608602 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-7sz59" podStartSLOduration=2.739516696 podStartE2EDuration="1m1.608580304s" podCreationTimestamp="2026-01-26 10:45:33 +0000 UTC" firstStartedPulling="2026-01-26 10:45:35.05654975 +0000 UTC m=+150.597775321" lastFinishedPulling="2026-01-26 10:46:33.925613368 +0000 UTC m=+209.466838929" observedRunningTime="2026-01-26 10:46:34.606140553 +0000 UTC m=+210.147366114" watchObservedRunningTime="2026-01-26 10:46:34.608580304 +0000 UTC m=+210.149805865" Jan 26 10:46:35 crc kubenswrapper[5003]: I0126 10:46:35.583354 5003 generic.go:334] "Generic (PLEG): container finished" podID="36a0e821-b752-4299-a9ec-1c719bdf5b2c" containerID="61337665437faac32f969edb14e8efbfa5d1b813c7ed6ddb3c24669358d26eee" exitCode=0 Jan 26 10:46:35 crc kubenswrapper[5003]: I0126 10:46:35.583406 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7j6tj" event={"ID":"36a0e821-b752-4299-a9ec-1c719bdf5b2c","Type":"ContainerDied","Data":"61337665437faac32f969edb14e8efbfa5d1b813c7ed6ddb3c24669358d26eee"} Jan 26 10:46:38 crc kubenswrapper[5003]: I0126 10:46:38.605151 5003 generic.go:334] "Generic (PLEG): container finished" 
podID="0fc9d3a8-b3ed-4321-a724-f0df98e10736" containerID="b2751aab7bfbc141305953a6a3b5b9ff3804c17a274c13c64e78edf0174e6416" exitCode=0 Jan 26 10:46:38 crc kubenswrapper[5003]: I0126 10:46:38.605218 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nd6jd" event={"ID":"0fc9d3a8-b3ed-4321-a724-f0df98e10736","Type":"ContainerDied","Data":"b2751aab7bfbc141305953a6a3b5b9ff3804c17a274c13c64e78edf0174e6416"} Jan 26 10:46:38 crc kubenswrapper[5003]: I0126 10:46:38.609957 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mh5fg" event={"ID":"f4378c6f-f796-4576-91b9-87a7ac43193e","Type":"ContainerStarted","Data":"2a917c8c9a1b0b79a8d88507a4510407a06c086ac6094c34588c99f00987ce9d"} Jan 26 10:46:38 crc kubenswrapper[5003]: I0126 10:46:38.612687 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4nzsj" event={"ID":"056db06f-766d-4393-87b8-4148b3f4c3c9","Type":"ContainerStarted","Data":"dd7b0b6150fee93bab970c1492263dc93e701ac83090c654db1af0112d19e7c9"} Jan 26 10:46:38 crc kubenswrapper[5003]: I0126 10:46:38.614682 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7j6tj" event={"ID":"36a0e821-b752-4299-a9ec-1c719bdf5b2c","Type":"ContainerStarted","Data":"d259242256f4f23244924df8049cf24aa283167e6f30e8710a41962afdd1a514"} Jan 26 10:46:38 crc kubenswrapper[5003]: I0126 10:46:38.676495 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7j6tj" podStartSLOduration=2.84870763 podStartE2EDuration="1m3.676477129s" podCreationTimestamp="2026-01-26 10:45:35 +0000 UTC" firstStartedPulling="2026-01-26 10:45:37.15669463 +0000 UTC m=+152.697920191" lastFinishedPulling="2026-01-26 10:46:37.984464129 +0000 UTC m=+213.525689690" observedRunningTime="2026-01-26 10:46:38.657671331 +0000 UTC m=+214.198896892" watchObservedRunningTime="2026-01-26 10:46:38.676477129 +0000 UTC m=+214.217702700" Jan 26 10:46:39 crc kubenswrapper[5003]: I0126 10:46:39.040394 5003 patch_prober.go:28] interesting pod/machine-config-daemon-m84kp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 10:46:39 crc kubenswrapper[5003]: I0126 10:46:39.040666 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 10:46:39 crc kubenswrapper[5003]: I0126 10:46:39.040786 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" Jan 26 10:46:39 crc kubenswrapper[5003]: I0126 10:46:39.041476 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a"} pod="openshift-machine-config-operator/machine-config-daemon-m84kp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 26 10:46:39 crc kubenswrapper[5003]: I0126 10:46:39.041646 5003 kuberuntime_container.go:808] "Killing container 
with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" containerID="cri-o://847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a" gracePeriod=600 Jan 26 10:46:39 crc kubenswrapper[5003]: I0126 10:46:39.633669 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qbjvl" event={"ID":"afdef7f7-32b5-4976-881a-398dc09ac9bd","Type":"ContainerStarted","Data":"dbdf537045f411dd088ecd751c8720269f397fcd101b424198e87387cce5a32f"} Jan 26 10:46:39 crc kubenswrapper[5003]: I0126 10:46:39.637582 5003 generic.go:334] "Generic (PLEG): container finished" podID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerID="847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a" exitCode=0 Jan 26 10:46:39 crc kubenswrapper[5003]: I0126 10:46:39.637645 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" event={"ID":"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd","Type":"ContainerDied","Data":"847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a"} Jan 26 10:46:39 crc kubenswrapper[5003]: I0126 10:46:39.641822 5003 generic.go:334] "Generic (PLEG): container finished" podID="056db06f-766d-4393-87b8-4148b3f4c3c9" containerID="dd7b0b6150fee93bab970c1492263dc93e701ac83090c654db1af0112d19e7c9" exitCode=0 Jan 26 10:46:39 crc kubenswrapper[5003]: I0126 10:46:39.641868 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4nzsj" event={"ID":"056db06f-766d-4393-87b8-4148b3f4c3c9","Type":"ContainerDied","Data":"dd7b0b6150fee93bab970c1492263dc93e701ac83090c654db1af0112d19e7c9"} Jan 26 10:46:39 crc kubenswrapper[5003]: I0126 10:46:39.654414 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mh5fg" podStartSLOduration=3.950901197 podStartE2EDuration="1m3.654394859s" podCreationTimestamp="2026-01-26 10:45:36 +0000 UTC" firstStartedPulling="2026-01-26 10:45:38.1927638 +0000 UTC m=+153.733989361" lastFinishedPulling="2026-01-26 10:46:37.896257462 +0000 UTC m=+213.437483023" observedRunningTime="2026-01-26 10:46:38.678209349 +0000 UTC m=+214.219434910" watchObservedRunningTime="2026-01-26 10:46:39.654394859 +0000 UTC m=+215.195620420" Jan 26 10:46:40 crc kubenswrapper[5003]: I0126 10:46:40.648076 5003 generic.go:334] "Generic (PLEG): container finished" podID="afdef7f7-32b5-4976-881a-398dc09ac9bd" containerID="dbdf537045f411dd088ecd751c8720269f397fcd101b424198e87387cce5a32f" exitCode=0 Jan 26 10:46:40 crc kubenswrapper[5003]: I0126 10:46:40.648147 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qbjvl" event={"ID":"afdef7f7-32b5-4976-881a-398dc09ac9bd","Type":"ContainerDied","Data":"dbdf537045f411dd088ecd751c8720269f397fcd101b424198e87387cce5a32f"} Jan 26 10:46:40 crc kubenswrapper[5003]: I0126 10:46:40.651892 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" event={"ID":"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd","Type":"ContainerStarted","Data":"dd6b3a58165b8c76447b588cc3ce270db8803864644876e30d9eecfbf65acf09"} Jan 26 10:46:40 crc kubenswrapper[5003]: I0126 10:46:40.657316 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4nzsj" 
event={"ID":"056db06f-766d-4393-87b8-4148b3f4c3c9","Type":"ContainerStarted","Data":"c05a9502523e8b8269e9eb3f5942a59087d4b779676213904e89cae6d867016d"} Jan 26 10:46:40 crc kubenswrapper[5003]: I0126 10:46:40.659031 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nd6jd" event={"ID":"0fc9d3a8-b3ed-4321-a724-f0df98e10736","Type":"ContainerStarted","Data":"bfc1d8fa21c617d5905caa8cd4ef280a36df5bbd731e790e883350d1b1126ea7"} Jan 26 10:46:40 crc kubenswrapper[5003]: I0126 10:46:40.684526 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4nzsj" podStartSLOduration=2.631264475 podStartE2EDuration="1m8.684510237s" podCreationTimestamp="2026-01-26 10:45:32 +0000 UTC" firstStartedPulling="2026-01-26 10:45:34.019140571 +0000 UTC m=+149.560366132" lastFinishedPulling="2026-01-26 10:46:40.072386313 +0000 UTC m=+215.613611894" observedRunningTime="2026-01-26 10:46:40.682665864 +0000 UTC m=+216.223891425" watchObservedRunningTime="2026-01-26 10:46:40.684510237 +0000 UTC m=+216.225735798" Jan 26 10:46:40 crc kubenswrapper[5003]: I0126 10:46:40.704601 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-nd6jd" podStartSLOduration=2.260576245 podStartE2EDuration="1m5.704582521s" podCreationTimestamp="2026-01-26 10:45:35 +0000 UTC" firstStartedPulling="2026-01-26 10:45:36.118990863 +0000 UTC m=+151.660216424" lastFinishedPulling="2026-01-26 10:46:39.562997139 +0000 UTC m=+215.104222700" observedRunningTime="2026-01-26 10:46:40.701846082 +0000 UTC m=+216.243071643" watchObservedRunningTime="2026-01-26 10:46:40.704582521 +0000 UTC m=+216.245808082" Jan 26 10:46:43 crc kubenswrapper[5003]: I0126 10:46:43.008630 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4nzsj" Jan 26 10:46:43 crc kubenswrapper[5003]: I0126 10:46:43.008707 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-4nzsj" Jan 26 10:46:43 crc kubenswrapper[5003]: I0126 10:46:43.253350 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nlxw6" Jan 26 10:46:43 crc kubenswrapper[5003]: I0126 10:46:43.253804 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nlxw6" Jan 26 10:46:43 crc kubenswrapper[5003]: I0126 10:46:43.382782 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nlxw6" Jan 26 10:46:43 crc kubenswrapper[5003]: I0126 10:46:43.480105 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xfvlh" Jan 26 10:46:43 crc kubenswrapper[5003]: I0126 10:46:43.525090 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xfvlh" Jan 26 10:46:43 crc kubenswrapper[5003]: I0126 10:46:43.586939 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-7sz59" Jan 26 10:46:43 crc kubenswrapper[5003]: I0126 10:46:43.587000 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-7sz59" Jan 26 10:46:43 crc kubenswrapper[5003]: I0126 10:46:43.634538 5003 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="started" pod="openshift-marketplace/certified-operators-7sz59" Jan 26 10:46:43 crc kubenswrapper[5003]: I0126 10:46:43.729978 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nlxw6" Jan 26 10:46:43 crc kubenswrapper[5003]: I0126 10:46:43.743771 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-7sz59" Jan 26 10:46:44 crc kubenswrapper[5003]: I0126 10:46:44.056117 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-4nzsj" podUID="056db06f-766d-4393-87b8-4148b3f4c3c9" containerName="registry-server" probeResult="failure" output=< Jan 26 10:46:44 crc kubenswrapper[5003]: timeout: failed to connect service ":50051" within 1s Jan 26 10:46:44 crc kubenswrapper[5003]: > Jan 26 10:46:44 crc kubenswrapper[5003]: I0126 10:46:44.831795 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xfvlh"] Jan 26 10:46:44 crc kubenswrapper[5003]: I0126 10:46:44.832240 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xfvlh" podUID="c837509e-b233-4ed4-9c00-49f01de19953" containerName="registry-server" containerID="cri-o://eb2b2af639f0cd69ebc8ed4f68b5d3d62cfae95bd194307b0e305f47da148809" gracePeriod=2 Jan 26 10:46:45 crc kubenswrapper[5003]: I0126 10:46:45.405694 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nd6jd" Jan 26 10:46:45 crc kubenswrapper[5003]: I0126 10:46:45.405759 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nd6jd" Jan 26 10:46:45 crc kubenswrapper[5003]: I0126 10:46:45.460313 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nd6jd" Jan 26 10:46:45 crc kubenswrapper[5003]: I0126 10:46:45.697424 5003 generic.go:334] "Generic (PLEG): container finished" podID="c837509e-b233-4ed4-9c00-49f01de19953" containerID="eb2b2af639f0cd69ebc8ed4f68b5d3d62cfae95bd194307b0e305f47da148809" exitCode=0 Jan 26 10:46:45 crc kubenswrapper[5003]: I0126 10:46:45.697510 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xfvlh" event={"ID":"c837509e-b233-4ed4-9c00-49f01de19953","Type":"ContainerDied","Data":"eb2b2af639f0cd69ebc8ed4f68b5d3d62cfae95bd194307b0e305f47da148809"} Jan 26 10:46:45 crc kubenswrapper[5003]: I0126 10:46:45.736701 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nd6jd" Jan 26 10:46:45 crc kubenswrapper[5003]: I0126 10:46:45.847329 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7sz59"] Jan 26 10:46:45 crc kubenswrapper[5003]: I0126 10:46:45.847517 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-7sz59" podUID="259b94cb-d033-4789-a0b6-dbfc3a361d0a" containerName="registry-server" containerID="cri-o://886e7190eceb97ca48af46167911abbb583fa330498d831795713a992fbd8681" gracePeriod=2 Jan 26 10:46:46 crc kubenswrapper[5003]: I0126 10:46:46.198433 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7j6tj" Jan 26 10:46:46 crc kubenswrapper[5003]: I0126 
10:46:46.198480 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7j6tj" Jan 26 10:46:46 crc kubenswrapper[5003]: I0126 10:46:46.241222 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-7j6tj" Jan 26 10:46:46 crc kubenswrapper[5003]: I0126 10:46:46.565570 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xfvlh" Jan 26 10:46:46 crc kubenswrapper[5003]: I0126 10:46:46.602584 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mh5fg" Jan 26 10:46:46 crc kubenswrapper[5003]: I0126 10:46:46.602667 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mh5fg" Jan 26 10:46:46 crc kubenswrapper[5003]: I0126 10:46:46.610641 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c837509e-b233-4ed4-9c00-49f01de19953-utilities\") pod \"c837509e-b233-4ed4-9c00-49f01de19953\" (UID: \"c837509e-b233-4ed4-9c00-49f01de19953\") " Jan 26 10:46:46 crc kubenswrapper[5003]: I0126 10:46:46.610779 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c837509e-b233-4ed4-9c00-49f01de19953-catalog-content\") pod \"c837509e-b233-4ed4-9c00-49f01de19953\" (UID: \"c837509e-b233-4ed4-9c00-49f01de19953\") " Jan 26 10:46:46 crc kubenswrapper[5003]: I0126 10:46:46.610816 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5shrs\" (UniqueName: \"kubernetes.io/projected/c837509e-b233-4ed4-9c00-49f01de19953-kube-api-access-5shrs\") pod \"c837509e-b233-4ed4-9c00-49f01de19953\" (UID: \"c837509e-b233-4ed4-9c00-49f01de19953\") " Jan 26 10:46:46 crc kubenswrapper[5003]: I0126 10:46:46.611902 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c837509e-b233-4ed4-9c00-49f01de19953-utilities" (OuterVolumeSpecName: "utilities") pod "c837509e-b233-4ed4-9c00-49f01de19953" (UID: "c837509e-b233-4ed4-9c00-49f01de19953"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:46:46 crc kubenswrapper[5003]: I0126 10:46:46.616533 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c837509e-b233-4ed4-9c00-49f01de19953-kube-api-access-5shrs" (OuterVolumeSpecName: "kube-api-access-5shrs") pod "c837509e-b233-4ed4-9c00-49f01de19953" (UID: "c837509e-b233-4ed4-9c00-49f01de19953"). InnerVolumeSpecName "kube-api-access-5shrs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:46:46 crc kubenswrapper[5003]: I0126 10:46:46.640612 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mh5fg" Jan 26 10:46:46 crc kubenswrapper[5003]: I0126 10:46:46.705034 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xfvlh" event={"ID":"c837509e-b233-4ed4-9c00-49f01de19953","Type":"ContainerDied","Data":"d35d562e4caaebbdfad10703173ebbbe414579c525bf73e5d975f121156e8bcc"} Jan 26 10:46:46 crc kubenswrapper[5003]: I0126 10:46:46.705089 5003 scope.go:117] "RemoveContainer" containerID="eb2b2af639f0cd69ebc8ed4f68b5d3d62cfae95bd194307b0e305f47da148809" Jan 26 10:46:46 crc kubenswrapper[5003]: I0126 10:46:46.705267 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xfvlh" Jan 26 10:46:46 crc kubenswrapper[5003]: I0126 10:46:46.712145 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5shrs\" (UniqueName: \"kubernetes.io/projected/c837509e-b233-4ed4-9c00-49f01de19953-kube-api-access-5shrs\") on node \"crc\" DevicePath \"\"" Jan 26 10:46:46 crc kubenswrapper[5003]: I0126 10:46:46.712206 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c837509e-b233-4ed4-9c00-49f01de19953-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 10:46:46 crc kubenswrapper[5003]: I0126 10:46:46.718586 5003 scope.go:117] "RemoveContainer" containerID="50da230bc4280d27faceae10386437555e2f964ca32798be11f0a8060e3605dc" Jan 26 10:46:46 crc kubenswrapper[5003]: I0126 10:46:46.738601 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mh5fg" Jan 26 10:46:46 crc kubenswrapper[5003]: I0126 10:46:46.739501 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7j6tj" Jan 26 10:46:46 crc kubenswrapper[5003]: I0126 10:46:46.741888 5003 scope.go:117] "RemoveContainer" containerID="780a6ca6b697263f9c5581f157b1f638432aed615cfd3decfa40e376351fe8d4" Jan 26 10:46:46 crc kubenswrapper[5003]: I0126 10:46:46.809379 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c837509e-b233-4ed4-9c00-49f01de19953-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c837509e-b233-4ed4-9c00-49f01de19953" (UID: "c837509e-b233-4ed4-9c00-49f01de19953"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:46:46 crc kubenswrapper[5003]: I0126 10:46:46.812863 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c837509e-b233-4ed4-9c00-49f01de19953-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 10:46:46 crc kubenswrapper[5003]: E0126 10:46:46.870449 5003 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod259b94cb_d033_4789_a0b6_dbfc3a361d0a.slice/crio-conmon-886e7190eceb97ca48af46167911abbb583fa330498d831795713a992fbd8681.scope\": RecentStats: unable to find data in memory cache]" Jan 26 10:46:47 crc kubenswrapper[5003]: I0126 10:46:47.054980 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xfvlh"] Jan 26 10:46:47 crc kubenswrapper[5003]: I0126 10:46:47.057197 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xfvlh"] Jan 26 10:46:47 crc kubenswrapper[5003]: I0126 10:46:47.716520 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qbjvl" event={"ID":"afdef7f7-32b5-4976-881a-398dc09ac9bd","Type":"ContainerStarted","Data":"1e51c1f396efa5116956df238e87b98a5777609f311f311b7fda93e3e1b9224c"} Jan 26 10:46:47 crc kubenswrapper[5003]: I0126 10:46:47.722596 5003 generic.go:334] "Generic (PLEG): container finished" podID="259b94cb-d033-4789-a0b6-dbfc3a361d0a" containerID="886e7190eceb97ca48af46167911abbb583fa330498d831795713a992fbd8681" exitCode=0 Jan 26 10:46:47 crc kubenswrapper[5003]: I0126 10:46:47.722650 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7sz59" event={"ID":"259b94cb-d033-4789-a0b6-dbfc3a361d0a","Type":"ContainerDied","Data":"886e7190eceb97ca48af46167911abbb583fa330498d831795713a992fbd8681"} Jan 26 10:46:47 crc kubenswrapper[5003]: I0126 10:46:47.736484 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qbjvl" podStartSLOduration=4.058087649 podStartE2EDuration="1m13.736441123s" podCreationTimestamp="2026-01-26 10:45:34 +0000 UTC" firstStartedPulling="2026-01-26 10:45:36.112003364 +0000 UTC m=+151.653228925" lastFinishedPulling="2026-01-26 10:46:45.790356848 +0000 UTC m=+221.331582399" observedRunningTime="2026-01-26 10:46:47.73632679 +0000 UTC m=+223.277552351" watchObservedRunningTime="2026-01-26 10:46:47.736441123 +0000 UTC m=+223.277666704" Jan 26 10:46:47 crc kubenswrapper[5003]: I0126 10:46:47.947795 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7sz59" Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.025877 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/259b94cb-d033-4789-a0b6-dbfc3a361d0a-utilities\") pod \"259b94cb-d033-4789-a0b6-dbfc3a361d0a\" (UID: \"259b94cb-d033-4789-a0b6-dbfc3a361d0a\") " Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.025911 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/259b94cb-d033-4789-a0b6-dbfc3a361d0a-catalog-content\") pod \"259b94cb-d033-4789-a0b6-dbfc3a361d0a\" (UID: \"259b94cb-d033-4789-a0b6-dbfc3a361d0a\") " Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.025977 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n7n25\" (UniqueName: \"kubernetes.io/projected/259b94cb-d033-4789-a0b6-dbfc3a361d0a-kube-api-access-n7n25\") pod \"259b94cb-d033-4789-a0b6-dbfc3a361d0a\" (UID: \"259b94cb-d033-4789-a0b6-dbfc3a361d0a\") " Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.026719 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/259b94cb-d033-4789-a0b6-dbfc3a361d0a-utilities" (OuterVolumeSpecName: "utilities") pod "259b94cb-d033-4789-a0b6-dbfc3a361d0a" (UID: "259b94cb-d033-4789-a0b6-dbfc3a361d0a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.031445 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/259b94cb-d033-4789-a0b6-dbfc3a361d0a-kube-api-access-n7n25" (OuterVolumeSpecName: "kube-api-access-n7n25") pod "259b94cb-d033-4789-a0b6-dbfc3a361d0a" (UID: "259b94cb-d033-4789-a0b6-dbfc3a361d0a"). InnerVolumeSpecName "kube-api-access-n7n25". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.049250 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" podUID="7b2d0b83-b35f-4128-af91-623a6871a431" containerName="oauth-openshift" containerID="cri-o://6e9ea2e58f3c75874a0197e2a425cd4b1bc85408cb5839c48ad3a61b89602d15" gracePeriod=15 Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.082948 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/259b94cb-d033-4789-a0b6-dbfc3a361d0a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "259b94cb-d033-4789-a0b6-dbfc3a361d0a" (UID: "259b94cb-d033-4789-a0b6-dbfc3a361d0a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.126777 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/259b94cb-d033-4789-a0b6-dbfc3a361d0a-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.126994 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/259b94cb-d033-4789-a0b6-dbfc3a361d0a-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.127090 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n7n25\" (UniqueName: \"kubernetes.io/projected/259b94cb-d033-4789-a0b6-dbfc3a361d0a-kube-api-access-n7n25\") on node \"crc\" DevicePath \"\"" Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.233778 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nd6jd"] Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.234040 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-nd6jd" podUID="0fc9d3a8-b3ed-4321-a724-f0df98e10736" containerName="registry-server" containerID="cri-o://bfc1d8fa21c617d5905caa8cd4ef280a36df5bbd731e790e883350d1b1126ea7" gracePeriod=2 Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.731316 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7sz59" event={"ID":"259b94cb-d033-4789-a0b6-dbfc3a361d0a","Type":"ContainerDied","Data":"0d2f2e39148f96d768cdb6dce9c726bad76a6fad1b38d1e25c65f2a706fdca5f"} Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.731585 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7sz59" Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.731704 5003 scope.go:117] "RemoveContainer" containerID="886e7190eceb97ca48af46167911abbb583fa330498d831795713a992fbd8681" Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.737652 5003 generic.go:334] "Generic (PLEG): container finished" podID="7b2d0b83-b35f-4128-af91-623a6871a431" containerID="6e9ea2e58f3c75874a0197e2a425cd4b1bc85408cb5839c48ad3a61b89602d15" exitCode=0 Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.737709 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" event={"ID":"7b2d0b83-b35f-4128-af91-623a6871a431","Type":"ContainerDied","Data":"6e9ea2e58f3c75874a0197e2a425cd4b1bc85408cb5839c48ad3a61b89602d15"} Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.742794 5003 generic.go:334] "Generic (PLEG): container finished" podID="0fc9d3a8-b3ed-4321-a724-f0df98e10736" containerID="bfc1d8fa21c617d5905caa8cd4ef280a36df5bbd731e790e883350d1b1126ea7" exitCode=0 Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.742840 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nd6jd" event={"ID":"0fc9d3a8-b3ed-4321-a724-f0df98e10736","Type":"ContainerDied","Data":"bfc1d8fa21c617d5905caa8cd4ef280a36df5bbd731e790e883350d1b1126ea7"} Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.757401 5003 scope.go:117] "RemoveContainer" containerID="1d4bee08acaf34d356de7f26a3ba7cf72ba15f775df999adf0b7db7e63961e5f" Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.771423 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7sz59"] Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.774674 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-7sz59"] Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.795371 5003 scope.go:117] "RemoveContainer" containerID="57718a922d75adf8bfcb98dddf5e70d80cfb731cd0fd714423a17bf87c8a35d5" Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.841610 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.939092 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7b2d0b83-b35f-4128-af91-623a6871a431-audit-dir\") pod \"7b2d0b83-b35f-4128-af91-623a6871a431\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.939147 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-cliconfig\") pod \"7b2d0b83-b35f-4128-af91-623a6871a431\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.939169 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-service-ca\") pod \"7b2d0b83-b35f-4128-af91-623a6871a431\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.939196 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-router-certs\") pod \"7b2d0b83-b35f-4128-af91-623a6871a431\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.939216 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-serving-cert\") pod \"7b2d0b83-b35f-4128-af91-623a6871a431\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.939233 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-user-template-login\") pod \"7b2d0b83-b35f-4128-af91-623a6871a431\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.939252 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7b2d0b83-b35f-4128-af91-623a6871a431-audit-policies\") pod \"7b2d0b83-b35f-4128-af91-623a6871a431\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.939300 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-user-template-error\") pod \"7b2d0b83-b35f-4128-af91-623a6871a431\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.939323 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-user-idp-0-file-data\") pod \"7b2d0b83-b35f-4128-af91-623a6871a431\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.939382 5003 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-ocp-branding-template\") pod \"7b2d0b83-b35f-4128-af91-623a6871a431\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.939401 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-trusted-ca-bundle\") pod \"7b2d0b83-b35f-4128-af91-623a6871a431\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.939433 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-session\") pod \"7b2d0b83-b35f-4128-af91-623a6871a431\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.939466 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-user-template-provider-selection\") pod \"7b2d0b83-b35f-4128-af91-623a6871a431\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.939486 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hh4t6\" (UniqueName: \"kubernetes.io/projected/7b2d0b83-b35f-4128-af91-623a6871a431-kube-api-access-hh4t6\") pod \"7b2d0b83-b35f-4128-af91-623a6871a431\" (UID: \"7b2d0b83-b35f-4128-af91-623a6871a431\") " Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.940413 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "7b2d0b83-b35f-4128-af91-623a6871a431" (UID: "7b2d0b83-b35f-4128-af91-623a6871a431"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.940932 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "7b2d0b83-b35f-4128-af91-623a6871a431" (UID: "7b2d0b83-b35f-4128-af91-623a6871a431"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.941310 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "7b2d0b83-b35f-4128-af91-623a6871a431" (UID: "7b2d0b83-b35f-4128-af91-623a6871a431"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.941374 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b2d0b83-b35f-4128-af91-623a6871a431-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "7b2d0b83-b35f-4128-af91-623a6871a431" (UID: "7b2d0b83-b35f-4128-af91-623a6871a431"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.939266 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7b2d0b83-b35f-4128-af91-623a6871a431-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "7b2d0b83-b35f-4128-af91-623a6871a431" (UID: "7b2d0b83-b35f-4128-af91-623a6871a431"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.943622 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b2d0b83-b35f-4128-af91-623a6871a431-kube-api-access-hh4t6" (OuterVolumeSpecName: "kube-api-access-hh4t6") pod "7b2d0b83-b35f-4128-af91-623a6871a431" (UID: "7b2d0b83-b35f-4128-af91-623a6871a431"). InnerVolumeSpecName "kube-api-access-hh4t6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.943633 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "7b2d0b83-b35f-4128-af91-623a6871a431" (UID: "7b2d0b83-b35f-4128-af91-623a6871a431"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.944041 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "7b2d0b83-b35f-4128-af91-623a6871a431" (UID: "7b2d0b83-b35f-4128-af91-623a6871a431"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.944355 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "7b2d0b83-b35f-4128-af91-623a6871a431" (UID: "7b2d0b83-b35f-4128-af91-623a6871a431"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.944616 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "7b2d0b83-b35f-4128-af91-623a6871a431" (UID: "7b2d0b83-b35f-4128-af91-623a6871a431"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.945489 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "7b2d0b83-b35f-4128-af91-623a6871a431" (UID: "7b2d0b83-b35f-4128-af91-623a6871a431"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.945918 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "7b2d0b83-b35f-4128-af91-623a6871a431" (UID: "7b2d0b83-b35f-4128-af91-623a6871a431"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.946272 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "7b2d0b83-b35f-4128-af91-623a6871a431" (UID: "7b2d0b83-b35f-4128-af91-623a6871a431"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:46:48 crc kubenswrapper[5003]: I0126 10:46:48.947667 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "7b2d0b83-b35f-4128-af91-623a6871a431" (UID: "7b2d0b83-b35f-4128-af91-623a6871a431"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.007472 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="259b94cb-d033-4789-a0b6-dbfc3a361d0a" path="/var/lib/kubelet/pods/259b94cb-d033-4789-a0b6-dbfc3a361d0a/volumes" Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.008368 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c837509e-b233-4ed4-9c00-49f01de19953" path="/var/lib/kubelet/pods/c837509e-b233-4ed4-9c00-49f01de19953/volumes" Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.040934 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.040984 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.041006 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.041021 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.041037 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hh4t6\" (UniqueName: \"kubernetes.io/projected/7b2d0b83-b35f-4128-af91-623a6871a431-kube-api-access-hh4t6\") on node \"crc\" DevicePath \"\"" Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.041050 5003 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7b2d0b83-b35f-4128-af91-623a6871a431-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.041060 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.041071 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.041082 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.041092 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 26 10:46:49 crc 
kubenswrapper[5003]: I0126 10:46:49.041103 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.041119 5003 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7b2d0b83-b35f-4128-af91-623a6871a431-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.041130 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.041142 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7b2d0b83-b35f-4128-af91-623a6871a431-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.650657 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nd6jd" Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.747808 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-smjbj\" (UniqueName: \"kubernetes.io/projected/0fc9d3a8-b3ed-4321-a724-f0df98e10736-kube-api-access-smjbj\") pod \"0fc9d3a8-b3ed-4321-a724-f0df98e10736\" (UID: \"0fc9d3a8-b3ed-4321-a724-f0df98e10736\") " Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.747873 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fc9d3a8-b3ed-4321-a724-f0df98e10736-catalog-content\") pod \"0fc9d3a8-b3ed-4321-a724-f0df98e10736\" (UID: \"0fc9d3a8-b3ed-4321-a724-f0df98e10736\") " Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.747992 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fc9d3a8-b3ed-4321-a724-f0df98e10736-utilities\") pod \"0fc9d3a8-b3ed-4321-a724-f0df98e10736\" (UID: \"0fc9d3a8-b3ed-4321-a724-f0df98e10736\") " Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.749428 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0fc9d3a8-b3ed-4321-a724-f0df98e10736-utilities" (OuterVolumeSpecName: "utilities") pod "0fc9d3a8-b3ed-4321-a724-f0df98e10736" (UID: "0fc9d3a8-b3ed-4321-a724-f0df98e10736"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.751824 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nd6jd" event={"ID":"0fc9d3a8-b3ed-4321-a724-f0df98e10736","Type":"ContainerDied","Data":"cebe25e20a515b7fa20c1de92e68ea1cff4b0cf7fecf04cde54dcca959f18682"} Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.751848 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nd6jd" Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.751919 5003 scope.go:117] "RemoveContainer" containerID="bfc1d8fa21c617d5905caa8cd4ef280a36df5bbd731e790e883350d1b1126ea7" Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.752742 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fc9d3a8-b3ed-4321-a724-f0df98e10736-kube-api-access-smjbj" (OuterVolumeSpecName: "kube-api-access-smjbj") pod "0fc9d3a8-b3ed-4321-a724-f0df98e10736" (UID: "0fc9d3a8-b3ed-4321-a724-f0df98e10736"). InnerVolumeSpecName "kube-api-access-smjbj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.754655 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" event={"ID":"7b2d0b83-b35f-4128-af91-623a6871a431","Type":"ContainerDied","Data":"024a3fd4ad33291bd953c7094cd47e23de15d599ebadd9335b88263dd9666157"} Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.754762 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-22wlq" Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.781092 5003 scope.go:117] "RemoveContainer" containerID="b2751aab7bfbc141305953a6a3b5b9ff3804c17a274c13c64e78edf0174e6416" Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.786596 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-22wlq"] Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.790542 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-22wlq"] Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.823767 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0fc9d3a8-b3ed-4321-a724-f0df98e10736-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0fc9d3a8-b3ed-4321-a724-f0df98e10736" (UID: "0fc9d3a8-b3ed-4321-a724-f0df98e10736"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.835301 5003 scope.go:117] "RemoveContainer" containerID="74aa587e54ab82cf08024a21f59294d1b671957104ee2ede917ada84d0f8edb8" Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.851596 5003 scope.go:117] "RemoveContainer" containerID="6e9ea2e58f3c75874a0197e2a425cd4b1bc85408cb5839c48ad3a61b89602d15" Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.852008 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fc9d3a8-b3ed-4321-a724-f0df98e10736-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.852037 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-smjbj\" (UniqueName: \"kubernetes.io/projected/0fc9d3a8-b3ed-4321-a724-f0df98e10736-kube-api-access-smjbj\") on node \"crc\" DevicePath \"\"" Jan 26 10:46:49 crc kubenswrapper[5003]: I0126 10:46:49.852049 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fc9d3a8-b3ed-4321-a724-f0df98e10736-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 10:46:50 crc kubenswrapper[5003]: I0126 10:46:50.090751 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nd6jd"] Jan 26 10:46:50 crc kubenswrapper[5003]: I0126 10:46:50.094632 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-nd6jd"] Jan 26 10:46:50 crc kubenswrapper[5003]: I0126 10:46:50.638537 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mh5fg"] Jan 26 10:46:50 crc kubenswrapper[5003]: I0126 10:46:50.639398 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mh5fg" podUID="f4378c6f-f796-4576-91b9-87a7ac43193e" containerName="registry-server" containerID="cri-o://2a917c8c9a1b0b79a8d88507a4510407a06c086ac6094c34588c99f00987ce9d" gracePeriod=2 Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.012948 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0fc9d3a8-b3ed-4321-a724-f0df98e10736" path="/var/lib/kubelet/pods/0fc9d3a8-b3ed-4321-a724-f0df98e10736/volumes" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.013932 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b2d0b83-b35f-4128-af91-623a6871a431" path="/var/lib/kubelet/pods/7b2d0b83-b35f-4128-af91-623a6871a431/volumes" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.276454 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-5678f9c799-b9cc4"] Jan 26 10:46:51 crc kubenswrapper[5003]: E0126 10:46:51.276782 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c61dfae0-67ed-41c4-891d-dbc0bca2ce7a" containerName="pruner" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.276806 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="c61dfae0-67ed-41c4-891d-dbc0bca2ce7a" containerName="pruner" Jan 26 10:46:51 crc kubenswrapper[5003]: E0126 10:46:51.276832 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="259b94cb-d033-4789-a0b6-dbfc3a361d0a" containerName="extract-content" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.276913 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="259b94cb-d033-4789-a0b6-dbfc3a361d0a" 
containerName="extract-content" Jan 26 10:46:51 crc kubenswrapper[5003]: E0126 10:46:51.276929 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c837509e-b233-4ed4-9c00-49f01de19953" containerName="extract-content" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.276943 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="c837509e-b233-4ed4-9c00-49f01de19953" containerName="extract-content" Jan 26 10:46:51 crc kubenswrapper[5003]: E0126 10:46:51.276960 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="259b94cb-d033-4789-a0b6-dbfc3a361d0a" containerName="extract-utilities" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.276972 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="259b94cb-d033-4789-a0b6-dbfc3a361d0a" containerName="extract-utilities" Jan 26 10:46:51 crc kubenswrapper[5003]: E0126 10:46:51.276991 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c837509e-b233-4ed4-9c00-49f01de19953" containerName="registry-server" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.277003 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="c837509e-b233-4ed4-9c00-49f01de19953" containerName="registry-server" Jan 26 10:46:51 crc kubenswrapper[5003]: E0126 10:46:51.277022 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b2d0b83-b35f-4128-af91-623a6871a431" containerName="oauth-openshift" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.277035 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b2d0b83-b35f-4128-af91-623a6871a431" containerName="oauth-openshift" Jan 26 10:46:51 crc kubenswrapper[5003]: E0126 10:46:51.277064 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="259b94cb-d033-4789-a0b6-dbfc3a361d0a" containerName="registry-server" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.277076 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="259b94cb-d033-4789-a0b6-dbfc3a361d0a" containerName="registry-server" Jan 26 10:46:51 crc kubenswrapper[5003]: E0126 10:46:51.277091 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fc9d3a8-b3ed-4321-a724-f0df98e10736" containerName="registry-server" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.277104 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fc9d3a8-b3ed-4321-a724-f0df98e10736" containerName="registry-server" Jan 26 10:46:51 crc kubenswrapper[5003]: E0126 10:46:51.277123 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fc9d3a8-b3ed-4321-a724-f0df98e10736" containerName="extract-utilities" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.277135 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fc9d3a8-b3ed-4321-a724-f0df98e10736" containerName="extract-utilities" Jan 26 10:46:51 crc kubenswrapper[5003]: E0126 10:46:51.277149 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c837509e-b233-4ed4-9c00-49f01de19953" containerName="extract-utilities" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.277161 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="c837509e-b233-4ed4-9c00-49f01de19953" containerName="extract-utilities" Jan 26 10:46:51 crc kubenswrapper[5003]: E0126 10:46:51.277179 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fc9d3a8-b3ed-4321-a724-f0df98e10736" containerName="extract-content" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.277196 5003 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="0fc9d3a8-b3ed-4321-a724-f0df98e10736" containerName="extract-content" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.277443 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b2d0b83-b35f-4128-af91-623a6871a431" containerName="oauth-openshift" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.277478 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="c61dfae0-67ed-41c4-891d-dbc0bca2ce7a" containerName="pruner" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.277502 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="259b94cb-d033-4789-a0b6-dbfc3a361d0a" containerName="registry-server" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.277522 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="c837509e-b233-4ed4-9c00-49f01de19953" containerName="registry-server" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.277540 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fc9d3a8-b3ed-4321-a724-f0df98e10736" containerName="registry-server" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.278113 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.282893 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.283155 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.283664 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.284709 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.285408 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.286771 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.287427 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.287597 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.288002 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.288011 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.289142 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.289345 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 
26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.299380 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.305017 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.309468 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-5678f9c799-b9cc4"] Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.316108 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.372239 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-system-service-ca\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.372320 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7rrfg\" (UniqueName: \"kubernetes.io/projected/094497ce-9c92-4b97-9817-6571ec8a5636-kube-api-access-7rrfg\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.372366 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.372427 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-system-session\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.372451 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/094497ce-9c92-4b97-9817-6571ec8a5636-audit-policies\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.372476 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.372533 
5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/094497ce-9c92-4b97-9817-6571ec8a5636-audit-dir\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.372562 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.372608 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-user-template-error\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.372631 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.372654 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.372676 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-system-router-certs\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.372711 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.372858 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-user-template-login\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: 
\"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.474483 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.474597 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-user-template-login\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.474673 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-system-service-ca\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.474729 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7rrfg\" (UniqueName: \"kubernetes.io/projected/094497ce-9c92-4b97-9817-6571ec8a5636-kube-api-access-7rrfg\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.475457 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.475630 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-system-session\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.476277 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/094497ce-9c92-4b97-9817-6571ec8a5636-audit-policies\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.476396 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: 
\"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.476530 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/094497ce-9c92-4b97-9817-6571ec8a5636-audit-dir\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.476563 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.476596 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-user-template-error\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.476630 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.476664 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.476696 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-system-router-certs\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.476975 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.476449 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-system-service-ca\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: 
\"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.477570 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/094497ce-9c92-4b97-9817-6571ec8a5636-audit-policies\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.478594 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/094497ce-9c92-4b97-9817-6571ec8a5636-audit-dir\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.478947 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.480851 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-system-session\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.480947 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.482989 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-user-template-login\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.484333 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-system-router-certs\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.484798 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-user-template-error\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:51 crc 
Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.485429 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4"
Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.485918 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4"
Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.488102 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/094497ce-9c92-4b97-9817-6571ec8a5636-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4"
Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.515350 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7rrfg\" (UniqueName: \"kubernetes.io/projected/094497ce-9c92-4b97-9817-6571ec8a5636-kube-api-access-7rrfg\") pod \"oauth-openshift-5678f9c799-b9cc4\" (UID: \"094497ce-9c92-4b97-9817-6571ec8a5636\") " pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4"
Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.609047 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4"
Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.775747 5003 generic.go:334] "Generic (PLEG): container finished" podID="f4378c6f-f796-4576-91b9-87a7ac43193e" containerID="2a917c8c9a1b0b79a8d88507a4510407a06c086ac6094c34588c99f00987ce9d" exitCode=0
Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.775792 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mh5fg" event={"ID":"f4378c6f-f796-4576-91b9-87a7ac43193e","Type":"ContainerDied","Data":"2a917c8c9a1b0b79a8d88507a4510407a06c086ac6094c34588c99f00987ce9d"}
Jan 26 10:46:51 crc kubenswrapper[5003]: I0126 10:46:51.820675 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-5678f9c799-b9cc4"]
Jan 26 10:46:52 crc kubenswrapper[5003]: I0126 10:46:52.088297 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mh5fg"
Jan 26 10:46:52 crc kubenswrapper[5003]: I0126 10:46:52.186369 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4378c6f-f796-4576-91b9-87a7ac43193e-utilities\") pod \"f4378c6f-f796-4576-91b9-87a7ac43193e\" (UID: \"f4378c6f-f796-4576-91b9-87a7ac43193e\") "
Jan 26 10:46:52 crc kubenswrapper[5003]: I0126 10:46:52.186414 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4378c6f-f796-4576-91b9-87a7ac43193e-catalog-content\") pod \"f4378c6f-f796-4576-91b9-87a7ac43193e\" (UID: \"f4378c6f-f796-4576-91b9-87a7ac43193e\") "
Jan 26 10:46:52 crc kubenswrapper[5003]: I0126 10:46:52.186456 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6rtfn\" (UniqueName: \"kubernetes.io/projected/f4378c6f-f796-4576-91b9-87a7ac43193e-kube-api-access-6rtfn\") pod \"f4378c6f-f796-4576-91b9-87a7ac43193e\" (UID: \"f4378c6f-f796-4576-91b9-87a7ac43193e\") "
Jan 26 10:46:52 crc kubenswrapper[5003]: I0126 10:46:52.187062 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4378c6f-f796-4576-91b9-87a7ac43193e-utilities" (OuterVolumeSpecName: "utilities") pod "f4378c6f-f796-4576-91b9-87a7ac43193e" (UID: "f4378c6f-f796-4576-91b9-87a7ac43193e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 10:46:52 crc kubenswrapper[5003]: I0126 10:46:52.190833 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4378c6f-f796-4576-91b9-87a7ac43193e-kube-api-access-6rtfn" (OuterVolumeSpecName: "kube-api-access-6rtfn") pod "f4378c6f-f796-4576-91b9-87a7ac43193e" (UID: "f4378c6f-f796-4576-91b9-87a7ac43193e"). InnerVolumeSpecName "kube-api-access-6rtfn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 10:46:52 crc kubenswrapper[5003]: I0126 10:46:52.288024 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4378c6f-f796-4576-91b9-87a7ac43193e-utilities\") on node \"crc\" DevicePath \"\""
Jan 26 10:46:52 crc kubenswrapper[5003]: I0126 10:46:52.288381 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6rtfn\" (UniqueName: \"kubernetes.io/projected/f4378c6f-f796-4576-91b9-87a7ac43193e-kube-api-access-6rtfn\") on node \"crc\" DevicePath \"\""
Jan 26 10:46:52 crc kubenswrapper[5003]: I0126 10:46:52.310356 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4378c6f-f796-4576-91b9-87a7ac43193e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f4378c6f-f796-4576-91b9-87a7ac43193e" (UID: "f4378c6f-f796-4576-91b9-87a7ac43193e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:46:52 crc kubenswrapper[5003]: I0126 10:46:52.389220 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4378c6f-f796-4576-91b9-87a7ac43193e-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 10:46:52 crc kubenswrapper[5003]: I0126 10:46:52.790176 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mh5fg" event={"ID":"f4378c6f-f796-4576-91b9-87a7ac43193e","Type":"ContainerDied","Data":"b45d974fd0b9ec0d134a7a5f1a38cfb392e6197c9f832b4ce019d85ba6e567b6"} Jan 26 10:46:52 crc kubenswrapper[5003]: I0126 10:46:52.790237 5003 scope.go:117] "RemoveContainer" containerID="2a917c8c9a1b0b79a8d88507a4510407a06c086ac6094c34588c99f00987ce9d" Jan 26 10:46:52 crc kubenswrapper[5003]: I0126 10:46:52.790423 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mh5fg" Jan 26 10:46:52 crc kubenswrapper[5003]: I0126 10:46:52.794174 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" event={"ID":"094497ce-9c92-4b97-9817-6571ec8a5636","Type":"ContainerStarted","Data":"feb27c0dc0598e5b161261d1fd06ba7d1935a6ab5472e36991a7ce62d559b9f9"} Jan 26 10:46:52 crc kubenswrapper[5003]: I0126 10:46:52.794764 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" event={"ID":"094497ce-9c92-4b97-9817-6571ec8a5636","Type":"ContainerStarted","Data":"f4274bb54556cf0ae2a917b1e8f3f9be0d3330f6c51e973b5b56684b86de7973"} Jan 26 10:46:52 crc kubenswrapper[5003]: I0126 10:46:52.794847 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:52 crc kubenswrapper[5003]: I0126 10:46:52.803146 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" Jan 26 10:46:52 crc kubenswrapper[5003]: I0126 10:46:52.819818 5003 scope.go:117] "RemoveContainer" containerID="c29b0046d470ccce81ed26ac116532e3a0431209fd407a3c24ecebe3d26a06d4" Jan 26 10:46:52 crc kubenswrapper[5003]: I0126 10:46:52.839868 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-5678f9c799-b9cc4" podStartSLOduration=29.839852713 podStartE2EDuration="29.839852713s" podCreationTimestamp="2026-01-26 10:46:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:46:52.828275106 +0000 UTC m=+228.369500757" watchObservedRunningTime="2026-01-26 10:46:52.839852713 +0000 UTC m=+228.381078274" Jan 26 10:46:52 crc kubenswrapper[5003]: I0126 10:46:52.841444 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mh5fg"] Jan 26 10:46:52 crc kubenswrapper[5003]: I0126 10:46:52.851629 5003 scope.go:117] "RemoveContainer" containerID="ec1dee5d89d40e0ab371053534a262904a1ecfebfcb931b87d5b9f9b21f87a24" Jan 26 10:46:52 crc kubenswrapper[5003]: I0126 10:46:52.853027 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mh5fg"] Jan 26 10:46:53 crc kubenswrapper[5003]: I0126 10:46:53.009512 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4378c6f-f796-4576-91b9-87a7ac43193e" 
path="/var/lib/kubelet/pods/f4378c6f-f796-4576-91b9-87a7ac43193e/volumes" Jan 26 10:46:53 crc kubenswrapper[5003]: I0126 10:46:53.055073 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4nzsj" Jan 26 10:46:53 crc kubenswrapper[5003]: I0126 10:46:53.102531 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4nzsj" Jan 26 10:46:55 crc kubenswrapper[5003]: I0126 10:46:55.424933 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qbjvl" Jan 26 10:46:55 crc kubenswrapper[5003]: I0126 10:46:55.447061 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qbjvl" Jan 26 10:46:55 crc kubenswrapper[5003]: I0126 10:46:55.478050 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qbjvl" Jan 26 10:46:55 crc kubenswrapper[5003]: I0126 10:46:55.853599 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qbjvl" Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.792434 5003 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 26 10:46:58 crc kubenswrapper[5003]: E0126 10:46:58.792894 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4378c6f-f796-4576-91b9-87a7ac43193e" containerName="extract-content" Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.792925 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4378c6f-f796-4576-91b9-87a7ac43193e" containerName="extract-content" Jan 26 10:46:58 crc kubenswrapper[5003]: E0126 10:46:58.792952 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4378c6f-f796-4576-91b9-87a7ac43193e" containerName="registry-server" Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.792971 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4378c6f-f796-4576-91b9-87a7ac43193e" containerName="registry-server" Jan 26 10:46:58 crc kubenswrapper[5003]: E0126 10:46:58.793000 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4378c6f-f796-4576-91b9-87a7ac43193e" containerName="extract-utilities" Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.793021 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4378c6f-f796-4576-91b9-87a7ac43193e" containerName="extract-utilities" Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.793334 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4378c6f-f796-4576-91b9-87a7ac43193e" containerName="registry-server" Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.794013 5003 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.794178 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.794552 5003 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.794563 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c" gracePeriod=15 Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.794668 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9" gracePeriod=15 Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.794785 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8" gracePeriod=15 Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.794810 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee" gracePeriod=15 Jan 26 10:46:58 crc kubenswrapper[5003]: E0126 10:46:58.794859 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.794877 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 26 10:46:58 crc kubenswrapper[5003]: E0126 10:46:58.794891 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.794899 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 26 10:46:58 crc kubenswrapper[5003]: E0126 10:46:58.794914 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.794922 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 26 10:46:58 crc kubenswrapper[5003]: E0126 10:46:58.794930 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.794937 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 26 10:46:58 crc kubenswrapper[5003]: E0126 10:46:58.794953 5003 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.794961 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 26 10:46:58 crc kubenswrapper[5003]: E0126 10:46:58.794977 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.794586 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960" gracePeriod=15 Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.794984 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.795246 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.795262 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.795276 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.795299 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.795316 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.800778 5003 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.829789 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.984431 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.984566 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.984610 5003 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.984647 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.984782 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.984872 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.984938 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 10:46:58 crc kubenswrapper[5003]: I0126 10:46:58.985095 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:46:59 crc kubenswrapper[5003]: I0126 10:46:59.088084 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 10:46:59 crc kubenswrapper[5003]: I0126 10:46:59.088167 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 10:46:59 crc kubenswrapper[5003]: I0126 10:46:59.088190 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:46:59 crc kubenswrapper[5003]: 
I0126 10:46:59.088223 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:46:59 crc kubenswrapper[5003]: I0126 10:46:59.088302 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 10:46:59 crc kubenswrapper[5003]: I0126 10:46:59.088341 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 10:46:59 crc kubenswrapper[5003]: I0126 10:46:59.088357 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 10:46:59 crc kubenswrapper[5003]: I0126 10:46:59.088419 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:46:59 crc kubenswrapper[5003]: I0126 10:46:59.088378 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:46:59 crc kubenswrapper[5003]: I0126 10:46:59.088705 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 10:46:59 crc kubenswrapper[5003]: I0126 10:46:59.089317 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 10:46:59 crc kubenswrapper[5003]: I0126 10:46:59.088446 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 10:46:59 crc kubenswrapper[5003]: I0126 10:46:59.088466 5003 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:46:59 crc kubenswrapper[5003]: I0126 10:46:59.089565 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 10:46:59 crc kubenswrapper[5003]: I0126 10:46:59.088486 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:46:59 crc kubenswrapper[5003]: I0126 10:46:59.089821 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 10:46:59 crc kubenswrapper[5003]: I0126 10:46:59.128344 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 10:46:59 crc kubenswrapper[5003]: E0126 10:46:59.149974 5003 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.192:6443: connect: connection refused" Jan 26 10:46:59 crc kubenswrapper[5003]: E0126 10:46:59.150845 5003 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.192:6443: connect: connection refused" Jan 26 10:46:59 crc kubenswrapper[5003]: E0126 10:46:59.151120 5003 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.192:6443: connect: connection refused" Jan 26 10:46:59 crc kubenswrapper[5003]: E0126 10:46:59.151368 5003 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.192:6443: connect: connection refused" Jan 26 10:46:59 crc kubenswrapper[5003]: E0126 10:46:59.151715 5003 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.192:6443: connect: connection refused" Jan 26 10:46:59 crc kubenswrapper[5003]: I0126 10:46:59.151790 5003 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Jan 26 10:46:59 crc kubenswrapper[5003]: E0126 10:46:59.152476 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.192:6443: 
connect: connection refused" interval="200ms" Jan 26 10:46:59 crc kubenswrapper[5003]: W0126 10:46:59.156471 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-fb50ad822d9852e86e7c84b25dfd9d793bdea54c31e3bfacaf54c8de65d9ccce WatchSource:0}: Error finding container fb50ad822d9852e86e7c84b25dfd9d793bdea54c31e3bfacaf54c8de65d9ccce: Status 404 returned error can't find the container with id fb50ad822d9852e86e7c84b25dfd9d793bdea54c31e3bfacaf54c8de65d9ccce Jan 26 10:46:59 crc kubenswrapper[5003]: E0126 10:46:59.159886 5003 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.192:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188e422268e563d9 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-26 10:46:59.159368665 +0000 UTC m=+234.700594246,LastTimestamp:2026-01-26 10:46:59.159368665 +0000 UTC m=+234.700594246,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 26 10:46:59 crc kubenswrapper[5003]: E0126 10:46:59.353737 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.192:6443: connect: connection refused" interval="400ms" Jan 26 10:46:59 crc kubenswrapper[5003]: E0126 10:46:59.755485 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.192:6443: connect: connection refused" interval="800ms" Jan 26 10:46:59 crc kubenswrapper[5003]: I0126 10:46:59.849638 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"fb50ad822d9852e86e7c84b25dfd9d793bdea54c31e3bfacaf54c8de65d9ccce"} Jan 26 10:47:00 crc kubenswrapper[5003]: E0126 10:47:00.559255 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.192:6443: connect: connection refused" interval="1.6s" Jan 26 10:47:02 crc kubenswrapper[5003]: E0126 10:47:02.159913 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.192:6443: connect: connection refused" interval="3.2s" Jan 26 10:47:02 crc kubenswrapper[5003]: E0126 10:47:02.633247 5003 event.go:368] "Unable to write event 
(may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.192:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188e422268e563d9 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-26 10:46:59.159368665 +0000 UTC m=+234.700594246,LastTimestamp:2026-01-26 10:46:59.159368665 +0000 UTC m=+234.700594246,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 26 10:47:02 crc kubenswrapper[5003]: I0126 10:47:02.872105 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 26 10:47:02 crc kubenswrapper[5003]: I0126 10:47:02.872849 5003 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960" exitCode=0 Jan 26 10:47:02 crc kubenswrapper[5003]: I0126 10:47:02.872880 5003 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee" exitCode=0 Jan 26 10:47:02 crc kubenswrapper[5003]: I0126 10:47:02.872890 5003 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9" exitCode=0 Jan 26 10:47:02 crc kubenswrapper[5003]: I0126 10:47:02.872898 5003 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8" exitCode=2 Jan 26 10:47:02 crc kubenswrapper[5003]: I0126 10:47:02.872907 5003 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c" exitCode=0 Jan 26 10:47:02 crc kubenswrapper[5003]: I0126 10:47:02.874329 5003 generic.go:334] "Generic (PLEG): container finished" podID="5eaf430b-7e00-4049-8e4a-afb533141643" containerID="ae1d5075a04894decd96a49315da7c3c66965eafdc64b55263d61660b4fa37c3" exitCode=0 Jan 26 10:47:02 crc kubenswrapper[5003]: I0126 10:47:02.874388 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"5eaf430b-7e00-4049-8e4a-afb533141643","Type":"ContainerDied","Data":"ae1d5075a04894decd96a49315da7c3c66965eafdc64b55263d61660b4fa37c3"} Jan 26 10:47:02 crc kubenswrapper[5003]: I0126 10:47:02.875020 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 
Jan 26 10:47:02 crc kubenswrapper[5003]: I0126 10:47:02.875020 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.192:6443: connect: connection refused"
Jan 26 10:47:02 crc kubenswrapper[5003]: I0126 10:47:02.875498 5003 status_manager.go:851] "Failed to get status for pod" podUID="5eaf430b-7e00-4049-8e4a-afb533141643" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.192:6443: connect: connection refused"
Jan 26 10:47:02 crc kubenswrapper[5003]: I0126 10:47:02.875588 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"4959de5804989e9eb4513883f65bc9ea765b9a4a760677e19043d99faf7657d1"}
Jan 26 10:47:02 crc kubenswrapper[5003]: I0126 10:47:02.876617 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.192:6443: connect: connection refused"
Jan 26 10:47:02 crc kubenswrapper[5003]: I0126 10:47:02.877039 5003 status_manager.go:851] "Failed to get status for pod" podUID="5eaf430b-7e00-4049-8e4a-afb533141643" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.192:6443: connect: connection refused"
Jan 26 10:47:03 crc kubenswrapper[5003]: I0126 10:47:03.025810 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Jan 26 10:47:03 crc kubenswrapper[5003]: I0126 10:47:03.026658 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 26 10:47:03 crc kubenswrapper[5003]: I0126 10:47:03.027098 5003 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.192:6443: connect: connection refused"
Jan 26 10:47:03 crc kubenswrapper[5003]: I0126 10:47:03.027256 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.192:6443: connect: connection refused"
Jan 26 10:47:03 crc kubenswrapper[5003]: I0126 10:47:03.027501 5003 status_manager.go:851] "Failed to get status for pod" podUID="5eaf430b-7e00-4049-8e4a-afb533141643" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.192:6443: connect: connection refused"
Jan 26 10:47:03 crc kubenswrapper[5003]: I0126 10:47:03.054421 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Jan 26 10:47:03 crc kubenswrapper[5003]: I0126 10:47:03.054526 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Jan 26 10:47:03 crc kubenswrapper[5003]: I0126 10:47:03.054576 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Jan 26 10:47:03 crc kubenswrapper[5003]: I0126 10:47:03.054960 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 26 10:47:03 crc kubenswrapper[5003]: I0126 10:47:03.055082 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 26 10:47:03 crc kubenswrapper[5003]: I0126 10:47:03.055107 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 26 10:47:03 crc kubenswrapper[5003]: I0126 10:47:03.066109 5003 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\""
Jan 26 10:47:03 crc kubenswrapper[5003]: I0126 10:47:03.066212 5003 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\""
Jan 26 10:47:03 crc kubenswrapper[5003]: I0126 10:47:03.066231 5003 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\""
Jan 26 10:47:03 crc kubenswrapper[5003]: E0126 10:47:03.795885 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:47:03Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:47:03Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:47:03Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T10:47:03Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.192:6443: connect: connection refused"
Jan 26 10:47:03 crc kubenswrapper[5003]: E0126 10:47:03.796541 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.192:6443: connect: connection refused"
Jan 26 10:47:03 crc kubenswrapper[5003]: E0126 10:47:03.796958 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.192:6443: connect: connection refused"
Jan 26 10:47:03 crc kubenswrapper[5003]: E0126 10:47:03.797473 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.192:6443: connect: connection refused"
Jan 26 10:47:03 crc kubenswrapper[5003]: E0126 10:47:03.798016 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.192:6443: connect: connection refused"
Jan 26 10:47:03 crc kubenswrapper[5003]: E0126 10:47:03.798046 5003 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 26 10:47:03 crc kubenswrapper[5003]: I0126 10:47:03.887257 5003 scope.go:117] "RemoveContainer" containerID="f9bddc86ef140f5e2e7ae371b33d8b355081a6d163797529b3042c70563bf960" Jan 26 10:47:03 crc kubenswrapper[5003]: I0126 10:47:03.887536 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:47:03 crc kubenswrapper[5003]: I0126 10:47:03.902754 5003 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.192:6443: connect: connection refused" Jan 26 10:47:03 crc kubenswrapper[5003]: I0126 10:47:03.903268 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.192:6443: connect: connection refused" Jan 26 10:47:03 crc kubenswrapper[5003]: I0126 10:47:03.903712 5003 status_manager.go:851] "Failed to get status for pod" podUID="5eaf430b-7e00-4049-8e4a-afb533141643" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.192:6443: connect: connection refused" Jan 26 10:47:03 crc kubenswrapper[5003]: I0126 10:47:03.912827 5003 scope.go:117] "RemoveContainer" containerID="8ff842c60ae0459adb775019bdad6f13a3d9d52e9c5b05d011ef0b9fa967f0ee" Jan 26 10:47:03 crc kubenswrapper[5003]: I0126 10:47:03.937147 5003 scope.go:117] "RemoveContainer" containerID="4746ce8bcac24c266602b016489dc3fde41e3a7a1335f1266f9974cb80e98dc9" Jan 26 10:47:03 crc kubenswrapper[5003]: I0126 10:47:03.956111 5003 scope.go:117] "RemoveContainer" containerID="0c24ae6a4e381e54e22f9922e7ec671dd58c16cce435b6dcafd0a45dd75b9ea8" Jan 26 10:47:03 crc kubenswrapper[5003]: I0126 10:47:03.968352 5003 scope.go:117] "RemoveContainer" containerID="9a5dc08e5357d79149ecd63bf86eed9d27bf514c6e8759332df42bed4baf3b3c" Jan 26 10:47:03 crc kubenswrapper[5003]: I0126 10:47:03.995171 5003 scope.go:117] "RemoveContainer" containerID="794ac13962ccad91fa359dda39759f1a56518f93982931579d9f111eaefdbf7a" Jan 26 10:47:04 crc kubenswrapper[5003]: I0126 10:47:04.139924 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 26 10:47:04 crc kubenswrapper[5003]: I0126 10:47:04.140546 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.192:6443: connect: connection refused" Jan 26 10:47:04 crc kubenswrapper[5003]: I0126 10:47:04.140709 5003 status_manager.go:851] "Failed to get status for pod" podUID="5eaf430b-7e00-4049-8e4a-afb533141643" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.192:6443: connect: connection refused" Jan 26 10:47:04 crc kubenswrapper[5003]: I0126 10:47:04.140843 5003 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.192:6443: connect: connection refused" Jan 26 10:47:04 crc kubenswrapper[5003]: I0126 10:47:04.181667 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5eaf430b-7e00-4049-8e4a-afb533141643-kubelet-dir\") pod \"5eaf430b-7e00-4049-8e4a-afb533141643\" (UID: \"5eaf430b-7e00-4049-8e4a-afb533141643\") " Jan 26 10:47:04 crc kubenswrapper[5003]: I0126 10:47:04.181786 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5eaf430b-7e00-4049-8e4a-afb533141643-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "5eaf430b-7e00-4049-8e4a-afb533141643" (UID: "5eaf430b-7e00-4049-8e4a-afb533141643"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 10:47:04 crc kubenswrapper[5003]: I0126 10:47:04.181805 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5eaf430b-7e00-4049-8e4a-afb533141643-kube-api-access\") pod \"5eaf430b-7e00-4049-8e4a-afb533141643\" (UID: \"5eaf430b-7e00-4049-8e4a-afb533141643\") " Jan 26 10:47:04 crc kubenswrapper[5003]: I0126 10:47:04.181849 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/5eaf430b-7e00-4049-8e4a-afb533141643-var-lock\") pod \"5eaf430b-7e00-4049-8e4a-afb533141643\" (UID: \"5eaf430b-7e00-4049-8e4a-afb533141643\") " Jan 26 10:47:04 crc kubenswrapper[5003]: I0126 10:47:04.181973 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5eaf430b-7e00-4049-8e4a-afb533141643-var-lock" (OuterVolumeSpecName: "var-lock") pod "5eaf430b-7e00-4049-8e4a-afb533141643" (UID: "5eaf430b-7e00-4049-8e4a-afb533141643"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 10:47:04 crc kubenswrapper[5003]: I0126 10:47:04.182097 5003 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/5eaf430b-7e00-4049-8e4a-afb533141643-var-lock\") on node \"crc\" DevicePath \"\"" Jan 26 10:47:04 crc kubenswrapper[5003]: I0126 10:47:04.182110 5003 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5eaf430b-7e00-4049-8e4a-afb533141643-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 26 10:47:04 crc kubenswrapper[5003]: I0126 10:47:04.186755 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5eaf430b-7e00-4049-8e4a-afb533141643-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "5eaf430b-7e00-4049-8e4a-afb533141643" (UID: "5eaf430b-7e00-4049-8e4a-afb533141643"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:47:04 crc kubenswrapper[5003]: I0126 10:47:04.283764 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5eaf430b-7e00-4049-8e4a-afb533141643-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 26 10:47:04 crc kubenswrapper[5003]: I0126 10:47:04.895783 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"5eaf430b-7e00-4049-8e4a-afb533141643","Type":"ContainerDied","Data":"851d923cdf4e09260900907c4f2efbf5c500554352a24e8044df052f51b96559"} Jan 26 10:47:04 crc kubenswrapper[5003]: I0126 10:47:04.895863 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="851d923cdf4e09260900907c4f2efbf5c500554352a24e8044df052f51b96559" Jan 26 10:47:04 crc kubenswrapper[5003]: I0126 10:47:04.895823 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 26 10:47:04 crc kubenswrapper[5003]: I0126 10:47:04.910398 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.192:6443: connect: connection refused" Jan 26 10:47:04 crc kubenswrapper[5003]: I0126 10:47:04.910604 5003 status_manager.go:851] "Failed to get status for pod" podUID="5eaf430b-7e00-4049-8e4a-afb533141643" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.192:6443: connect: connection refused" Jan 26 10:47:04 crc kubenswrapper[5003]: I0126 10:47:04.911153 5003 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.192:6443: connect: connection refused" Jan 26 10:47:05 crc kubenswrapper[5003]: I0126 10:47:05.009898 5003 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.192:6443: connect: connection refused" Jan 26 10:47:05 crc kubenswrapper[5003]: I0126 10:47:05.009967 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Jan 26 10:47:05 crc kubenswrapper[5003]: I0126 10:47:05.010221 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.192:6443: connect: connection refused" Jan 26 10:47:05 crc kubenswrapper[5003]: I0126 10:47:05.010661 5003 status_manager.go:851] "Failed to get status for pod" podUID="5eaf430b-7e00-4049-8e4a-afb533141643" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.192:6443: connect: connection refused" Jan 26 10:47:05 crc kubenswrapper[5003]: E0126 10:47:05.361735 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.192:6443: connect: connection refused" interval="6.4s" Jan 26 10:47:11 crc kubenswrapper[5003]: I0126 10:47:11.001630 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:47:11 crc kubenswrapper[5003]: I0126 10:47:11.002838 5003 status_manager.go:851] "Failed to get status for pod" podUID="5eaf430b-7e00-4049-8e4a-afb533141643" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.192:6443: connect: connection refused" Jan 26 10:47:11 crc kubenswrapper[5003]: I0126 10:47:11.003566 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.192:6443: connect: connection refused" Jan 26 10:47:11 crc kubenswrapper[5003]: I0126 10:47:11.022555 5003 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="de168e2a-6762-4792-8f48-4c754032f74a" Jan 26 10:47:11 crc kubenswrapper[5003]: I0126 10:47:11.022609 5003 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="de168e2a-6762-4792-8f48-4c754032f74a" Jan 26 10:47:11 crc kubenswrapper[5003]: E0126 10:47:11.023365 5003 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.192:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:47:11 crc kubenswrapper[5003]: I0126 10:47:11.023892 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:47:11 crc kubenswrapper[5003]: W0126 10:47:11.051058 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-49d2ff3e72c3685053dd1b1da7e9047259459d42f745ef10ccb817048c2d0bf0 WatchSource:0}: Error finding container 49d2ff3e72c3685053dd1b1da7e9047259459d42f745ef10ccb817048c2d0bf0: Status 404 returned error can't find the container with id 49d2ff3e72c3685053dd1b1da7e9047259459d42f745ef10ccb817048c2d0bf0 Jan 26 10:47:11 crc kubenswrapper[5003]: E0126 10:47:11.763078 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.192:6443: connect: connection refused" interval="7s" Jan 26 10:47:11 crc kubenswrapper[5003]: I0126 10:47:11.936164 5003 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="3ddac975dc25bcadd3ca0af6ac073d9c66e695af38bb3a490e44267c9614a6f5" exitCode=0 Jan 26 10:47:11 crc kubenswrapper[5003]: I0126 10:47:11.936300 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"3ddac975dc25bcadd3ca0af6ac073d9c66e695af38bb3a490e44267c9614a6f5"} Jan 26 10:47:11 crc kubenswrapper[5003]: I0126 10:47:11.936379 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"49d2ff3e72c3685053dd1b1da7e9047259459d42f745ef10ccb817048c2d0bf0"} Jan 26 10:47:11 crc kubenswrapper[5003]: I0126 10:47:11.936960 5003 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="de168e2a-6762-4792-8f48-4c754032f74a" Jan 26 10:47:11 crc kubenswrapper[5003]: I0126 10:47:11.936996 5003 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="de168e2a-6762-4792-8f48-4c754032f74a" Jan 26 10:47:11 crc kubenswrapper[5003]: I0126 10:47:11.937252 5003 status_manager.go:851] "Failed to get status for pod" podUID="5eaf430b-7e00-4049-8e4a-afb533141643" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.192:6443: connect: connection refused" Jan 26 10:47:11 crc kubenswrapper[5003]: E0126 10:47:11.937517 5003 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.192:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:47:11 crc kubenswrapper[5003]: I0126 10:47:11.937573 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.192:6443: connect: connection refused" Jan 26 10:47:11 crc kubenswrapper[5003]: I0126 10:47:11.940636 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 26 10:47:11 crc kubenswrapper[5003]: I0126 10:47:11.940685 5003 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3" exitCode=1 Jan 26 10:47:11 crc kubenswrapper[5003]: I0126 10:47:11.940715 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3"} Jan 26 10:47:11 crc kubenswrapper[5003]: I0126 10:47:11.941057 5003 scope.go:117] "RemoveContainer" containerID="d8cf8c1ac8f07642dd987cf5055ded31aa026e372161f86e6b6aeb4eee954be3" Jan 26 10:47:11 crc kubenswrapper[5003]: I0126 10:47:11.941632 5003 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.192:6443: connect: connection refused" Jan 26 10:47:11 crc kubenswrapper[5003]: I0126 10:47:11.942035 5003 status_manager.go:851] "Failed to get status for pod" podUID="5eaf430b-7e00-4049-8e4a-afb533141643" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.192:6443: connect: connection refused" Jan 26 10:47:11 crc 
kubenswrapper[5003]: I0126 10:47:11.942493 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.192:6443: connect: connection refused" Jan 26 10:47:12 crc kubenswrapper[5003]: E0126 10:47:12.634931 5003 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.192:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188e422268e563d9 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-26 10:46:59.159368665 +0000 UTC m=+234.700594246,LastTimestamp:2026-01-26 10:46:59.159368665 +0000 UTC m=+234.700594246,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 26 10:47:12 crc kubenswrapper[5003]: I0126 10:47:12.949693 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"546ffc2922e8abaf8535a1be610db3fdd03b0b265e7b586c98b076bf730d5bf5"} Jan 26 10:47:12 crc kubenswrapper[5003]: I0126 10:47:12.949737 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"e8eef1395ded151f1ddac694a18916333b00287cbec881a82296c234ed59fa3f"} Jan 26 10:47:12 crc kubenswrapper[5003]: I0126 10:47:12.953107 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 26 10:47:12 crc kubenswrapper[5003]: I0126 10:47:12.953137 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9c4eabd68f9a0b9b6f51af2f9ca84a6816fa3e51eae8a011f6639c58ca0b6ff1"} Jan 26 10:47:13 crc kubenswrapper[5003]: I0126 10:47:13.085639 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 10:47:13 crc kubenswrapper[5003]: I0126 10:47:13.089536 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 10:47:13 crc kubenswrapper[5003]: I0126 10:47:13.967059 5003 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="de168e2a-6762-4792-8f48-4c754032f74a" Jan 26 10:47:13 crc kubenswrapper[5003]: I0126 10:47:13.967662 5003 mirror_client.go:130] "Deleting a mirror 
pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="de168e2a-6762-4792-8f48-4c754032f74a" Jan 26 10:47:13 crc kubenswrapper[5003]: I0126 10:47:13.967443 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"febfcc044ded0dbcda9f8fa3865e6e02061892948f893056d26f8a3f7fefa7dd"} Jan 26 10:47:13 crc kubenswrapper[5003]: I0126 10:47:13.967752 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 10:47:13 crc kubenswrapper[5003]: I0126 10:47:13.967773 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b4d27f83d1f8a2bba1f5a8fc31a772687679a2b59305a5c655f02c474cd8d3b1"} Jan 26 10:47:13 crc kubenswrapper[5003]: I0126 10:47:13.967792 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:47:13 crc kubenswrapper[5003]: I0126 10:47:13.967801 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"f3eeb161beb48b99329cc21a5ee8c74a86d391dc3ee1db3f31927c1ad7fac578"} Jan 26 10:47:16 crc kubenswrapper[5003]: I0126 10:47:16.024188 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:47:16 crc kubenswrapper[5003]: I0126 10:47:16.024337 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:47:16 crc kubenswrapper[5003]: I0126 10:47:16.030946 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:47:18 crc kubenswrapper[5003]: I0126 10:47:18.984004 5003 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:47:19 crc kubenswrapper[5003]: I0126 10:47:19.075914 5003 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="38723cbd-2579-4be8-aeff-802f389bbb63" Jan 26 10:47:19 crc kubenswrapper[5003]: I0126 10:47:19.999034 5003 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="de168e2a-6762-4792-8f48-4c754032f74a" Jan 26 10:47:19 crc kubenswrapper[5003]: I0126 10:47:19.999071 5003 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="de168e2a-6762-4792-8f48-4c754032f74a" Jan 26 10:47:20 crc kubenswrapper[5003]: I0126 10:47:20.003232 5003 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="38723cbd-2579-4be8-aeff-802f389bbb63" Jan 26 10:47:20 crc kubenswrapper[5003]: I0126 10:47:20.003974 5003 status_manager.go:308] "Container readiness changed before pod has synced" pod="openshift-kube-apiserver/kube-apiserver-crc" containerID="cri-o://e8eef1395ded151f1ddac694a18916333b00287cbec881a82296c234ed59fa3f" Jan 26 10:47:20 crc kubenswrapper[5003]: I0126 10:47:20.004017 5003 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:47:21 crc kubenswrapper[5003]: I0126 10:47:21.003174 5003 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="de168e2a-6762-4792-8f48-4c754032f74a" Jan 26 10:47:21 crc kubenswrapper[5003]: I0126 10:47:21.003202 5003 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="de168e2a-6762-4792-8f48-4c754032f74a" Jan 26 10:47:21 crc kubenswrapper[5003]: I0126 10:47:21.006031 5003 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="38723cbd-2579-4be8-aeff-802f389bbb63" Jan 26 10:47:28 crc kubenswrapper[5003]: I0126 10:47:28.799189 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 26 10:47:29 crc kubenswrapper[5003]: I0126 10:47:29.675558 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 26 10:47:29 crc kubenswrapper[5003]: I0126 10:47:29.721423 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 26 10:47:29 crc kubenswrapper[5003]: I0126 10:47:29.748090 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 26 10:47:30 crc kubenswrapper[5003]: I0126 10:47:30.025946 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 26 10:47:30 crc kubenswrapper[5003]: I0126 10:47:30.371698 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 10:47:30 crc kubenswrapper[5003]: I0126 10:47:30.399642 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 26 10:47:30 crc kubenswrapper[5003]: I0126 10:47:30.550053 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 26 10:47:30 crc kubenswrapper[5003]: I0126 10:47:30.880139 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 26 10:47:31 crc kubenswrapper[5003]: I0126 10:47:31.010500 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 26 10:47:31 crc kubenswrapper[5003]: I0126 10:47:31.157073 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 26 10:47:31 crc kubenswrapper[5003]: I0126 10:47:31.334304 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 26 10:47:31 crc kubenswrapper[5003]: I0126 10:47:31.346809 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 26 10:47:31 crc kubenswrapper[5003]: I0126 10:47:31.393915 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 26 10:47:31 crc kubenswrapper[5003]: I0126 10:47:31.742984 5003 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 26 10:47:31 crc kubenswrapper[5003]: I0126 10:47:31.751800 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 26 10:47:31 crc kubenswrapper[5003]: I0126 10:47:31.830729 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 26 10:47:32 crc kubenswrapper[5003]: I0126 10:47:32.181900 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 26 10:47:32 crc kubenswrapper[5003]: I0126 10:47:32.249529 5003 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 26 10:47:32 crc kubenswrapper[5003]: I0126 10:47:32.443401 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 26 10:47:32 crc kubenswrapper[5003]: I0126 10:47:32.575804 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 26 10:47:32 crc kubenswrapper[5003]: I0126 10:47:32.597722 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 26 10:47:32 crc kubenswrapper[5003]: I0126 10:47:32.598071 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 26 10:47:32 crc kubenswrapper[5003]: I0126 10:47:32.749175 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 26 10:47:32 crc kubenswrapper[5003]: I0126 10:47:32.755049 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 26 10:47:32 crc kubenswrapper[5003]: I0126 10:47:32.781698 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 26 10:47:32 crc kubenswrapper[5003]: I0126 10:47:32.879826 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 26 10:47:32 crc kubenswrapper[5003]: I0126 10:47:32.940132 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 26 10:47:33 crc kubenswrapper[5003]: I0126 10:47:33.004517 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 26 10:47:33 crc kubenswrapper[5003]: I0126 10:47:33.034524 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 26 10:47:33 crc kubenswrapper[5003]: I0126 10:47:33.189977 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 26 10:47:33 crc kubenswrapper[5003]: I0126 10:47:33.230981 5003 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 26 10:47:33 crc kubenswrapper[5003]: I0126 10:47:33.260054 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 26 10:47:33 crc kubenswrapper[5003]: I0126 
10:47:33.364888 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 26 10:47:33 crc kubenswrapper[5003]: I0126 10:47:33.375759 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 26 10:47:33 crc kubenswrapper[5003]: I0126 10:47:33.376010 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 26 10:47:33 crc kubenswrapper[5003]: I0126 10:47:33.379813 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 26 10:47:33 crc kubenswrapper[5003]: I0126 10:47:33.525470 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 26 10:47:33 crc kubenswrapper[5003]: I0126 10:47:33.627478 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 26 10:47:33 crc kubenswrapper[5003]: I0126 10:47:33.680372 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 26 10:47:33 crc kubenswrapper[5003]: I0126 10:47:33.714858 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 26 10:47:33 crc kubenswrapper[5003]: I0126 10:47:33.759750 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 26 10:47:33 crc kubenswrapper[5003]: I0126 10:47:33.839588 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 26 10:47:33 crc kubenswrapper[5003]: I0126 10:47:33.861734 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 26 10:47:33 crc kubenswrapper[5003]: I0126 10:47:33.925195 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 26 10:47:34 crc kubenswrapper[5003]: I0126 10:47:34.014089 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 26 10:47:34 crc kubenswrapper[5003]: I0126 10:47:34.094254 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 26 10:47:34 crc kubenswrapper[5003]: I0126 10:47:34.148332 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 26 10:47:34 crc kubenswrapper[5003]: I0126 10:47:34.244662 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 26 10:47:34 crc kubenswrapper[5003]: I0126 10:47:34.296893 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 26 10:47:34 crc kubenswrapper[5003]: I0126 10:47:34.496998 5003 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 26 10:47:34 crc kubenswrapper[5003]: I0126 10:47:34.508311 5003 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 26 10:47:34 crc kubenswrapper[5003]: I0126 10:47:34.655331 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 26 10:47:34 crc kubenswrapper[5003]: I0126 10:47:34.662063 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 26 10:47:34 crc kubenswrapper[5003]: I0126 10:47:34.670321 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 26 10:47:34 crc kubenswrapper[5003]: I0126 10:47:34.772910 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 26 10:47:34 crc kubenswrapper[5003]: I0126 10:47:34.778232 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 26 10:47:34 crc kubenswrapper[5003]: I0126 10:47:34.839031 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 26 10:47:35 crc kubenswrapper[5003]: I0126 10:47:35.009808 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 26 10:47:35 crc kubenswrapper[5003]: I0126 10:47:35.045437 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 26 10:47:35 crc kubenswrapper[5003]: I0126 10:47:35.067686 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 26 10:47:35 crc kubenswrapper[5003]: I0126 10:47:35.125985 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 26 10:47:35 crc kubenswrapper[5003]: I0126 10:47:35.140743 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 26 10:47:35 crc kubenswrapper[5003]: I0126 10:47:35.261009 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 26 10:47:35 crc kubenswrapper[5003]: I0126 10:47:35.310150 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 26 10:47:35 crc kubenswrapper[5003]: I0126 10:47:35.436044 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 26 10:47:35 crc kubenswrapper[5003]: I0126 10:47:35.589173 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 26 10:47:35 crc kubenswrapper[5003]: I0126 10:47:35.630504 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 26 10:47:35 crc kubenswrapper[5003]: I0126 10:47:35.657797 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 26 10:47:35 crc kubenswrapper[5003]: I0126 10:47:35.936176 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 26 10:47:35 crc kubenswrapper[5003]: I0126 10:47:35.974452 5003 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 26 10:47:36 crc kubenswrapper[5003]: I0126 10:47:36.007260 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 26 10:47:36 crc kubenswrapper[5003]: I0126 10:47:36.088571 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 26 10:47:36 crc kubenswrapper[5003]: I0126 10:47:36.168026 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 26 10:47:36 crc kubenswrapper[5003]: I0126 10:47:36.182771 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 26 10:47:36 crc kubenswrapper[5003]: I0126 10:47:36.219974 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 26 10:47:36 crc kubenswrapper[5003]: I0126 10:47:36.258395 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 26 10:47:36 crc kubenswrapper[5003]: I0126 10:47:36.307098 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 26 10:47:36 crc kubenswrapper[5003]: I0126 10:47:36.337301 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 26 10:47:36 crc kubenswrapper[5003]: I0126 10:47:36.363888 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 26 10:47:36 crc kubenswrapper[5003]: I0126 10:47:36.405553 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 26 10:47:36 crc kubenswrapper[5003]: I0126 10:47:36.470700 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 26 10:47:36 crc kubenswrapper[5003]: I0126 10:47:36.498114 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 26 10:47:36 crc kubenswrapper[5003]: I0126 10:47:36.502608 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 26 10:47:36 crc kubenswrapper[5003]: I0126 10:47:36.508327 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 26 10:47:36 crc kubenswrapper[5003]: I0126 10:47:36.547140 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 26 10:47:36 crc kubenswrapper[5003]: I0126 10:47:36.602497 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 26 10:47:36 crc kubenswrapper[5003]: I0126 10:47:36.621162 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 26 10:47:36 crc kubenswrapper[5003]: I0126 10:47:36.626165 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 26 10:47:36 crc kubenswrapper[5003]: I0126 
10:47:36.634191 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 26 10:47:36 crc kubenswrapper[5003]: I0126 10:47:36.748327 5003 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Jan 26 10:47:36 crc kubenswrapper[5003]: I0126 10:47:36.750337 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=38.75031932 podStartE2EDuration="38.75031932s" podCreationTimestamp="2026-01-26 10:46:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:47:18.991489413 +0000 UTC m=+254.532714974" watchObservedRunningTime="2026-01-26 10:47:36.75031932 +0000 UTC m=+272.291544891" Jan 26 10:47:36 crc kubenswrapper[5003]: I0126 10:47:36.752949 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 26 10:47:36 crc kubenswrapper[5003]: I0126 10:47:36.753034 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 26 10:47:36 crc kubenswrapper[5003]: I0126 10:47:36.756854 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 10:47:36 crc kubenswrapper[5003]: I0126 10:47:36.770167 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=18.770152341 podStartE2EDuration="18.770152341s" podCreationTimestamp="2026-01-26 10:47:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:47:36.767531757 +0000 UTC m=+272.308757308" watchObservedRunningTime="2026-01-26 10:47:36.770152341 +0000 UTC m=+272.311377892" Jan 26 10:47:36 crc kubenswrapper[5003]: I0126 10:47:36.844518 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 26 10:47:36 crc kubenswrapper[5003]: I0126 10:47:36.912769 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 26 10:47:36 crc kubenswrapper[5003]: I0126 10:47:36.918970 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 26 10:47:36 crc kubenswrapper[5003]: I0126 10:47:36.967151 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 26 10:47:37 crc kubenswrapper[5003]: I0126 10:47:37.151801 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 26 10:47:37 crc kubenswrapper[5003]: I0126 10:47:37.318988 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 26 10:47:37 crc kubenswrapper[5003]: I0126 10:47:37.336660 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 26 10:47:37 crc kubenswrapper[5003]: I0126 10:47:37.354627 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 26 10:47:37 crc kubenswrapper[5003]: I0126 10:47:37.356740 5003 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 26 10:47:37 crc kubenswrapper[5003]: I0126 10:47:37.443876 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 26 10:47:37 crc kubenswrapper[5003]: I0126 10:47:37.476793 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 26 10:47:37 crc kubenswrapper[5003]: I0126 10:47:37.478103 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 26 10:47:37 crc kubenswrapper[5003]: I0126 10:47:37.496317 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 26 10:47:37 crc kubenswrapper[5003]: I0126 10:47:37.536933 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 26 10:47:37 crc kubenswrapper[5003]: I0126 10:47:37.584177 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 26 10:47:37 crc kubenswrapper[5003]: I0126 10:47:37.623774 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 26 10:47:37 crc kubenswrapper[5003]: I0126 10:47:37.660647 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 26 10:47:37 crc kubenswrapper[5003]: I0126 10:47:37.693817 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 26 10:47:37 crc kubenswrapper[5003]: I0126 10:47:37.801818 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 26 10:47:37 crc kubenswrapper[5003]: I0126 10:47:37.818255 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 26 10:47:37 crc kubenswrapper[5003]: I0126 10:47:37.912019 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 26 10:47:38 crc kubenswrapper[5003]: I0126 10:47:38.056846 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 26 10:47:38 crc kubenswrapper[5003]: I0126 10:47:38.134611 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 26 10:47:38 crc kubenswrapper[5003]: I0126 10:47:38.217135 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 26 10:47:38 crc kubenswrapper[5003]: I0126 10:47:38.275615 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 26 10:47:38 crc kubenswrapper[5003]: I0126 10:47:38.402216 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 26 10:47:38 crc kubenswrapper[5003]: I0126 10:47:38.429812 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 26 10:47:38 crc kubenswrapper[5003]: I0126 10:47:38.443599 5003 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 26 10:47:38 crc kubenswrapper[5003]: I0126 10:47:38.569738 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 26 10:47:38 crc kubenswrapper[5003]: I0126 10:47:38.648912 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 26 10:47:38 crc kubenswrapper[5003]: I0126 10:47:38.718997 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 26 10:47:38 crc kubenswrapper[5003]: I0126 10:47:38.733090 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 26 10:47:38 crc kubenswrapper[5003]: I0126 10:47:38.755998 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 26 10:47:38 crc kubenswrapper[5003]: I0126 10:47:38.764860 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 26 10:47:38 crc kubenswrapper[5003]: I0126 10:47:38.776347 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 26 10:47:38 crc kubenswrapper[5003]: I0126 10:47:38.813167 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 26 10:47:38 crc kubenswrapper[5003]: I0126 10:47:38.895978 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 26 10:47:38 crc kubenswrapper[5003]: I0126 10:47:38.918972 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 26 10:47:38 crc kubenswrapper[5003]: I0126 10:47:38.935181 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 26 10:47:38 crc kubenswrapper[5003]: I0126 10:47:38.935389 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 26 10:47:38 crc kubenswrapper[5003]: I0126 10:47:38.935439 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 26 10:47:38 crc kubenswrapper[5003]: I0126 10:47:38.952132 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 26 10:47:38 crc kubenswrapper[5003]: I0126 10:47:38.977522 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 26 10:47:39 crc kubenswrapper[5003]: I0126 10:47:39.081956 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 26 10:47:39 crc kubenswrapper[5003]: I0126 10:47:39.096677 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 26 10:47:39 crc kubenswrapper[5003]: I0126 10:47:39.158190 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 26 10:47:39 crc kubenswrapper[5003]: I0126 10:47:39.179404 5003 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 26 10:47:39 crc kubenswrapper[5003]: I0126 10:47:39.191845 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 26 10:47:39 crc kubenswrapper[5003]: I0126 10:47:39.209564 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 26 10:47:39 crc kubenswrapper[5003]: I0126 10:47:39.240247 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 26 10:47:39 crc kubenswrapper[5003]: I0126 10:47:39.247595 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 26 10:47:39 crc kubenswrapper[5003]: I0126 10:47:39.287139 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 26 10:47:39 crc kubenswrapper[5003]: I0126 10:47:39.336836 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 26 10:47:39 crc kubenswrapper[5003]: I0126 10:47:39.397422 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 26 10:47:39 crc kubenswrapper[5003]: I0126 10:47:39.434949 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 26 10:47:39 crc kubenswrapper[5003]: I0126 10:47:39.472039 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 26 10:47:39 crc kubenswrapper[5003]: I0126 10:47:39.579198 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 26 10:47:39 crc kubenswrapper[5003]: I0126 10:47:39.604410 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 26 10:47:39 crc kubenswrapper[5003]: I0126 10:47:39.700611 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 26 10:47:39 crc kubenswrapper[5003]: I0126 10:47:39.705952 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 26 10:47:39 crc kubenswrapper[5003]: I0126 10:47:39.767368 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 26 10:47:39 crc kubenswrapper[5003]: I0126 10:47:39.823368 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 26 10:47:39 crc kubenswrapper[5003]: I0126 10:47:39.833476 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 26 10:47:39 crc kubenswrapper[5003]: I0126 10:47:39.878999 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 26 10:47:40 crc kubenswrapper[5003]: I0126 10:47:40.085567 5003 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-service-ca"/"signing-cabundle" Jan 26 10:47:40 crc kubenswrapper[5003]: I0126 10:47:40.172854 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 26 10:47:40 crc kubenswrapper[5003]: I0126 10:47:40.226018 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 26 10:47:40 crc kubenswrapper[5003]: I0126 10:47:40.338312 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 26 10:47:40 crc kubenswrapper[5003]: I0126 10:47:40.340783 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 26 10:47:40 crc kubenswrapper[5003]: I0126 10:47:40.396310 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 26 10:47:40 crc kubenswrapper[5003]: I0126 10:47:40.419989 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 26 10:47:40 crc kubenswrapper[5003]: I0126 10:47:40.476026 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 26 10:47:40 crc kubenswrapper[5003]: I0126 10:47:40.593433 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 26 10:47:40 crc kubenswrapper[5003]: I0126 10:47:40.620144 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 26 10:47:40 crc kubenswrapper[5003]: I0126 10:47:40.683649 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 26 10:47:40 crc kubenswrapper[5003]: I0126 10:47:40.774736 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 26 10:47:40 crc kubenswrapper[5003]: I0126 10:47:40.859395 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 26 10:47:40 crc kubenswrapper[5003]: I0126 10:47:40.931960 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 26 10:47:40 crc kubenswrapper[5003]: I0126 10:47:40.957420 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 26 10:47:41 crc kubenswrapper[5003]: I0126 10:47:41.046820 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 26 10:47:41 crc kubenswrapper[5003]: I0126 10:47:41.107674 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 26 10:47:41 crc kubenswrapper[5003]: I0126 10:47:41.112969 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 26 10:47:41 crc kubenswrapper[5003]: I0126 10:47:41.175991 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 26 10:47:41 crc kubenswrapper[5003]: I0126 10:47:41.232428 5003 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication"/"openshift-service-ca.crt" Jan 26 10:47:41 crc kubenswrapper[5003]: I0126 10:47:41.235954 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 26 10:47:41 crc kubenswrapper[5003]: I0126 10:47:41.260060 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 26 10:47:41 crc kubenswrapper[5003]: I0126 10:47:41.273475 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 26 10:47:41 crc kubenswrapper[5003]: I0126 10:47:41.275765 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 26 10:47:41 crc kubenswrapper[5003]: I0126 10:47:41.289171 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 26 10:47:41 crc kubenswrapper[5003]: I0126 10:47:41.319265 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 26 10:47:41 crc kubenswrapper[5003]: I0126 10:47:41.397098 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 26 10:47:41 crc kubenswrapper[5003]: I0126 10:47:41.468224 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 26 10:47:41 crc kubenswrapper[5003]: I0126 10:47:41.490955 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 26 10:47:41 crc kubenswrapper[5003]: I0126 10:47:41.504811 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 26 10:47:41 crc kubenswrapper[5003]: I0126 10:47:41.592299 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 26 10:47:41 crc kubenswrapper[5003]: I0126 10:47:41.597679 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 26 10:47:41 crc kubenswrapper[5003]: I0126 10:47:41.612724 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 26 10:47:41 crc kubenswrapper[5003]: I0126 10:47:41.663189 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 26 10:47:41 crc kubenswrapper[5003]: I0126 10:47:41.673022 5003 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 26 10:47:41 crc kubenswrapper[5003]: I0126 10:47:41.673306 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://4959de5804989e9eb4513883f65bc9ea765b9a4a760677e19043d99faf7657d1" gracePeriod=5 Jan 26 10:47:41 crc kubenswrapper[5003]: I0126 10:47:41.687639 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 26 10:47:41 crc kubenswrapper[5003]: I0126 10:47:41.911741 5003 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-console"/"console-oauth-config" Jan 26 10:47:41 crc kubenswrapper[5003]: I0126 10:47:41.931531 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 26 10:47:41 crc kubenswrapper[5003]: I0126 10:47:41.963123 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 26 10:47:41 crc kubenswrapper[5003]: I0126 10:47:41.980953 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 26 10:47:42 crc kubenswrapper[5003]: I0126 10:47:42.103562 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 26 10:47:42 crc kubenswrapper[5003]: I0126 10:47:42.113255 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 26 10:47:42 crc kubenswrapper[5003]: I0126 10:47:42.140203 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 26 10:47:42 crc kubenswrapper[5003]: I0126 10:47:42.209846 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 26 10:47:42 crc kubenswrapper[5003]: I0126 10:47:42.225594 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 26 10:47:42 crc kubenswrapper[5003]: I0126 10:47:42.306391 5003 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Jan 26 10:47:42 crc kubenswrapper[5003]: I0126 10:47:42.455150 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 26 10:47:42 crc kubenswrapper[5003]: I0126 10:47:42.502236 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 26 10:47:42 crc kubenswrapper[5003]: I0126 10:47:42.563445 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 26 10:47:42 crc kubenswrapper[5003]: I0126 10:47:42.618318 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 26 10:47:42 crc kubenswrapper[5003]: I0126 10:47:42.637547 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 26 10:47:42 crc kubenswrapper[5003]: I0126 10:47:42.829624 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 26 10:47:42 crc kubenswrapper[5003]: I0126 10:47:42.859366 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 26 10:47:42 crc kubenswrapper[5003]: I0126 10:47:42.906238 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 26 10:47:42 crc kubenswrapper[5003]: I0126 10:47:42.956367 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 26 10:47:43 crc kubenswrapper[5003]: I0126 10:47:43.057459 5003 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 26 10:47:43 crc kubenswrapper[5003]: I0126 10:47:43.059289 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 26 10:47:43 crc kubenswrapper[5003]: I0126 10:47:43.160502 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 26 10:47:43 crc kubenswrapper[5003]: I0126 10:47:43.190884 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 26 10:47:43 crc kubenswrapper[5003]: I0126 10:47:43.217354 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 26 10:47:43 crc kubenswrapper[5003]: I0126 10:47:43.340491 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 26 10:47:43 crc kubenswrapper[5003]: I0126 10:47:43.347800 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 26 10:47:43 crc kubenswrapper[5003]: I0126 10:47:43.363868 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 26 10:47:43 crc kubenswrapper[5003]: I0126 10:47:43.422690 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 26 10:47:43 crc kubenswrapper[5003]: I0126 10:47:43.458357 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 26 10:47:43 crc kubenswrapper[5003]: I0126 10:47:43.550005 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 26 10:47:43 crc kubenswrapper[5003]: I0126 10:47:43.611701 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 26 10:47:43 crc kubenswrapper[5003]: I0126 10:47:43.681166 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 26 10:47:43 crc kubenswrapper[5003]: I0126 10:47:43.734710 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 26 10:47:43 crc kubenswrapper[5003]: I0126 10:47:43.744828 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 26 10:47:43 crc kubenswrapper[5003]: I0126 10:47:43.756872 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 26 10:47:43 crc kubenswrapper[5003]: I0126 10:47:43.920461 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 26 10:47:44 crc kubenswrapper[5003]: I0126 10:47:44.017620 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 26 10:47:44 crc kubenswrapper[5003]: I0126 10:47:44.132145 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 26 10:47:44 crc kubenswrapper[5003]: I0126 10:47:44.281619 5003 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 26 10:47:44 crc kubenswrapper[5003]: I0126 10:47:44.306423 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 26 10:47:44 crc kubenswrapper[5003]: I0126 10:47:44.326423 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 26 10:47:44 crc kubenswrapper[5003]: I0126 10:47:44.328059 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 26 10:47:44 crc kubenswrapper[5003]: I0126 10:47:44.419898 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 26 10:47:44 crc kubenswrapper[5003]: I0126 10:47:44.614882 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 26 10:47:44 crc kubenswrapper[5003]: I0126 10:47:44.626863 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 26 10:47:44 crc kubenswrapper[5003]: I0126 10:47:44.744106 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 26 10:47:44 crc kubenswrapper[5003]: I0126 10:47:44.888558 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 26 10:47:44 crc kubenswrapper[5003]: I0126 10:47:44.972141 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 26 10:47:45 crc kubenswrapper[5003]: I0126 10:47:45.027732 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 26 10:47:45 crc kubenswrapper[5003]: I0126 10:47:45.184547 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 26 10:47:45 crc kubenswrapper[5003]: I0126 10:47:45.373962 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 26 10:47:45 crc kubenswrapper[5003]: I0126 10:47:45.546864 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 26 10:47:46 crc kubenswrapper[5003]: I0126 10:47:45.673848 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 26 10:47:46 crc kubenswrapper[5003]: I0126 10:47:45.890448 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 26 10:47:46 crc kubenswrapper[5003]: I0126 10:47:45.908349 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 26 10:47:46 crc kubenswrapper[5003]: I0126 10:47:46.007200 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 26 10:47:46 crc kubenswrapper[5003]: I0126 10:47:46.022518 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 26 10:47:46 crc kubenswrapper[5003]: I0126 10:47:46.637503 5003 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-console"/"default-dockercfg-chnjx" Jan 26 10:47:46 crc kubenswrapper[5003]: I0126 10:47:46.753424 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 26 10:47:47 crc kubenswrapper[5003]: I0126 10:47:47.142192 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 26 10:47:47 crc kubenswrapper[5003]: I0126 10:47:47.142273 5003 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="4959de5804989e9eb4513883f65bc9ea765b9a4a760677e19043d99faf7657d1" exitCode=137 Jan 26 10:47:47 crc kubenswrapper[5003]: I0126 10:47:47.164738 5003 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 26 10:47:47 crc kubenswrapper[5003]: I0126 10:47:47.248110 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 26 10:47:47 crc kubenswrapper[5003]: I0126 10:47:47.275196 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 26 10:47:47 crc kubenswrapper[5003]: I0126 10:47:47.275302 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 10:47:47 crc kubenswrapper[5003]: I0126 10:47:47.334813 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 26 10:47:47 crc kubenswrapper[5003]: I0126 10:47:47.423354 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 26 10:47:47 crc kubenswrapper[5003]: I0126 10:47:47.423423 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 26 10:47:47 crc kubenswrapper[5003]: I0126 10:47:47.423476 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 26 10:47:47 crc kubenswrapper[5003]: I0126 10:47:47.423509 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 26 10:47:47 crc kubenswrapper[5003]: I0126 10:47:47.423561 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 26 10:47:47 crc kubenswrapper[5003]: I0126 10:47:47.423701 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 10:47:47 crc kubenswrapper[5003]: I0126 10:47:47.423742 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 10:47:47 crc kubenswrapper[5003]: I0126 10:47:47.423717 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 10:47:47 crc kubenswrapper[5003]: I0126 10:47:47.423766 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 10:47:47 crc kubenswrapper[5003]: I0126 10:47:47.424262 5003 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 26 10:47:47 crc kubenswrapper[5003]: I0126 10:47:47.424377 5003 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Jan 26 10:47:47 crc kubenswrapper[5003]: I0126 10:47:47.424403 5003 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Jan 26 10:47:47 crc kubenswrapper[5003]: I0126 10:47:47.424427 5003 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Jan 26 10:47:47 crc kubenswrapper[5003]: I0126 10:47:47.431583 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 10:47:47 crc kubenswrapper[5003]: I0126 10:47:47.525526 5003 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 26 10:47:48 crc kubenswrapper[5003]: I0126 10:47:48.084229 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 26 10:47:48 crc kubenswrapper[5003]: I0126 10:47:48.152629 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 26 10:47:48 crc kubenswrapper[5003]: I0126 10:47:48.152700 5003 scope.go:117] "RemoveContainer" containerID="4959de5804989e9eb4513883f65bc9ea765b9a4a760677e19043d99faf7657d1" Jan 26 10:47:48 crc kubenswrapper[5003]: I0126 10:47:48.152770 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 10:47:49 crc kubenswrapper[5003]: I0126 10:47:49.009135 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Jan 26 10:47:49 crc kubenswrapper[5003]: I0126 10:47:49.009407 5003 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Jan 26 10:47:49 crc kubenswrapper[5003]: I0126 10:47:49.021070 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 26 10:47:49 crc kubenswrapper[5003]: I0126 10:47:49.021103 5003 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="88172520-1c82-4e97-ae83-e9cc2d71e791" Jan 26 10:47:49 crc kubenswrapper[5003]: I0126 10:47:49.025157 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 26 10:47:49 crc kubenswrapper[5003]: I0126 10:47:49.025180 5003 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="88172520-1c82-4e97-ae83-e9cc2d71e791" Jan 26 10:47:56 crc kubenswrapper[5003]: I0126 10:47:56.847620 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nlxw6"] Jan 26 10:47:56 crc kubenswrapper[5003]: I0126 10:47:56.848519 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-nlxw6" podUID="a6f195f7-8805-422e-b316-c57c71a27a38" containerName="registry-server" containerID="cri-o://f960fad4688dcbdabdd80b05e3504bccf43eb7a616f36b3c2a19b08c5fe26c0f" gracePeriod=30 Jan 26 10:47:56 crc kubenswrapper[5003]: I0126 10:47:56.863207 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4nzsj"] Jan 26 10:47:56 crc kubenswrapper[5003]: I0126 10:47:56.863587 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-4nzsj" podUID="056db06f-766d-4393-87b8-4148b3f4c3c9" containerName="registry-server" containerID="cri-o://c05a9502523e8b8269e9eb3f5942a59087d4b779676213904e89cae6d867016d" gracePeriod=30 Jan 26 
10:47:56 crc kubenswrapper[5003]: I0126 10:47:56.869685 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xsgg6"] Jan 26 10:47:56 crc kubenswrapper[5003]: I0126 10:47:56.869925 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-xsgg6" podUID="a3f59cd7-44a7-4d88-a8bb-7108b70efa58" containerName="marketplace-operator" containerID="cri-o://4277e048d62cdbbe31da4607945280b845f8cb2304a41ade0efc5a5a3ef134d4" gracePeriod=30 Jan 26 10:47:56 crc kubenswrapper[5003]: I0126 10:47:56.879684 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qbjvl"] Jan 26 10:47:56 crc kubenswrapper[5003]: I0126 10:47:56.879938 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qbjvl" podUID="afdef7f7-32b5-4976-881a-398dc09ac9bd" containerName="registry-server" containerID="cri-o://1e51c1f396efa5116956df238e87b98a5777609f311f311b7fda93e3e1b9224c" gracePeriod=30 Jan 26 10:47:56 crc kubenswrapper[5003]: I0126 10:47:56.886235 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7j6tj"] Jan 26 10:47:56 crc kubenswrapper[5003]: I0126 10:47:56.887446 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-7j6tj" podUID="36a0e821-b752-4299-a9ec-1c719bdf5b2c" containerName="registry-server" containerID="cri-o://d259242256f4f23244924df8049cf24aa283167e6f30e8710a41962afdd1a514" gracePeriod=30 Jan 26 10:47:56 crc kubenswrapper[5003]: I0126 10:47:56.899759 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-ljw7m"] Jan 26 10:47:56 crc kubenswrapper[5003]: E0126 10:47:56.900048 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 26 10:47:56 crc kubenswrapper[5003]: I0126 10:47:56.900068 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 26 10:47:56 crc kubenswrapper[5003]: E0126 10:47:56.900091 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5eaf430b-7e00-4049-8e4a-afb533141643" containerName="installer" Jan 26 10:47:56 crc kubenswrapper[5003]: I0126 10:47:56.900101 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="5eaf430b-7e00-4049-8e4a-afb533141643" containerName="installer" Jan 26 10:47:56 crc kubenswrapper[5003]: I0126 10:47:56.900308 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="5eaf430b-7e00-4049-8e4a-afb533141643" containerName="installer" Jan 26 10:47:56 crc kubenswrapper[5003]: I0126 10:47:56.900334 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 26 10:47:56 crc kubenswrapper[5003]: I0126 10:47:56.900798 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-ljw7m" Jan 26 10:47:56 crc kubenswrapper[5003]: I0126 10:47:56.908670 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-ljw7m"] Jan 26 10:47:56 crc kubenswrapper[5003]: I0126 10:47:56.956331 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4f101492-8469-482f-a258-7a3a4e9fade0-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-ljw7m\" (UID: \"4f101492-8469-482f-a258-7a3a4e9fade0\") " pod="openshift-marketplace/marketplace-operator-79b997595-ljw7m" Jan 26 10:47:56 crc kubenswrapper[5003]: I0126 10:47:56.956411 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4f101492-8469-482f-a258-7a3a4e9fade0-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-ljw7m\" (UID: \"4f101492-8469-482f-a258-7a3a4e9fade0\") " pod="openshift-marketplace/marketplace-operator-79b997595-ljw7m" Jan 26 10:47:56 crc kubenswrapper[5003]: I0126 10:47:56.956479 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcl65\" (UniqueName: \"kubernetes.io/projected/4f101492-8469-482f-a258-7a3a4e9fade0-kube-api-access-kcl65\") pod \"marketplace-operator-79b997595-ljw7m\" (UID: \"4f101492-8469-482f-a258-7a3a4e9fade0\") " pod="openshift-marketplace/marketplace-operator-79b997595-ljw7m" Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.057225 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4f101492-8469-482f-a258-7a3a4e9fade0-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-ljw7m\" (UID: \"4f101492-8469-482f-a258-7a3a4e9fade0\") " pod="openshift-marketplace/marketplace-operator-79b997595-ljw7m" Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.057323 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4f101492-8469-482f-a258-7a3a4e9fade0-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-ljw7m\" (UID: \"4f101492-8469-482f-a258-7a3a4e9fade0\") " pod="openshift-marketplace/marketplace-operator-79b997595-ljw7m" Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.057359 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcl65\" (UniqueName: \"kubernetes.io/projected/4f101492-8469-482f-a258-7a3a4e9fade0-kube-api-access-kcl65\") pod \"marketplace-operator-79b997595-ljw7m\" (UID: \"4f101492-8469-482f-a258-7a3a4e9fade0\") " pod="openshift-marketplace/marketplace-operator-79b997595-ljw7m" Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.058880 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4f101492-8469-482f-a258-7a3a4e9fade0-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-ljw7m\" (UID: \"4f101492-8469-482f-a258-7a3a4e9fade0\") " pod="openshift-marketplace/marketplace-operator-79b997595-ljw7m" Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.062833 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/4f101492-8469-482f-a258-7a3a4e9fade0-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-ljw7m\" (UID: \"4f101492-8469-482f-a258-7a3a4e9fade0\") " pod="openshift-marketplace/marketplace-operator-79b997595-ljw7m" Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.074501 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcl65\" (UniqueName: \"kubernetes.io/projected/4f101492-8469-482f-a258-7a3a4e9fade0-kube-api-access-kcl65\") pod \"marketplace-operator-79b997595-ljw7m\" (UID: \"4f101492-8469-482f-a258-7a3a4e9fade0\") " pod="openshift-marketplace/marketplace-operator-79b997595-ljw7m" Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.225883 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-ljw7m" Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.413133 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-ljw7m"] Jan 26 10:47:57 crc kubenswrapper[5003]: W0126 10:47:57.444360 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f101492_8469_482f_a258_7a3a4e9fade0.slice/crio-5fa9f4409277710c47130a1a638ff783ca2a30afc6eb8955491b859630fcdc9a WatchSource:0}: Error finding container 5fa9f4409277710c47130a1a638ff783ca2a30afc6eb8955491b859630fcdc9a: Status 404 returned error can't find the container with id 5fa9f4409277710c47130a1a638ff783ca2a30afc6eb8955491b859630fcdc9a Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.691109 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-xsgg6" Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.694129 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nlxw6" Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.749804 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4nzsj" Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.765314 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a3f59cd7-44a7-4d88-a8bb-7108b70efa58-marketplace-operator-metrics\") pod \"a3f59cd7-44a7-4d88-a8bb-7108b70efa58\" (UID: \"a3f59cd7-44a7-4d88-a8bb-7108b70efa58\") " Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.765441 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqnqd\" (UniqueName: \"kubernetes.io/projected/a6f195f7-8805-422e-b316-c57c71a27a38-kube-api-access-fqnqd\") pod \"a6f195f7-8805-422e-b316-c57c71a27a38\" (UID: \"a6f195f7-8805-422e-b316-c57c71a27a38\") " Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.767560 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6f195f7-8805-422e-b316-c57c71a27a38-utilities\") pod \"a6f195f7-8805-422e-b316-c57c71a27a38\" (UID: \"a6f195f7-8805-422e-b316-c57c71a27a38\") " Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.768355 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a6f195f7-8805-422e-b316-c57c71a27a38-utilities" (OuterVolumeSpecName: "utilities") pod "a6f195f7-8805-422e-b316-c57c71a27a38" (UID: "a6f195f7-8805-422e-b316-c57c71a27a38"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.770143 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/056db06f-766d-4393-87b8-4148b3f4c3c9-catalog-content\") pod \"056db06f-766d-4393-87b8-4148b3f4c3c9\" (UID: \"056db06f-766d-4393-87b8-4148b3f4c3c9\") " Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.770941 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a3f59cd7-44a7-4d88-a8bb-7108b70efa58-marketplace-trusted-ca\") pod \"a3f59cd7-44a7-4d88-a8bb-7108b70efa58\" (UID: \"a3f59cd7-44a7-4d88-a8bb-7108b70efa58\") " Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.771141 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6f195f7-8805-422e-b316-c57c71a27a38-catalog-content\") pod \"a6f195f7-8805-422e-b316-c57c71a27a38\" (UID: \"a6f195f7-8805-422e-b316-c57c71a27a38\") " Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.771310 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4z7ql\" (UniqueName: \"kubernetes.io/projected/a3f59cd7-44a7-4d88-a8bb-7108b70efa58-kube-api-access-4z7ql\") pod \"a3f59cd7-44a7-4d88-a8bb-7108b70efa58\" (UID: \"a3f59cd7-44a7-4d88-a8bb-7108b70efa58\") " Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.771395 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3f59cd7-44a7-4d88-a8bb-7108b70efa58-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "a3f59cd7-44a7-4d88-a8bb-7108b70efa58" (UID: "a3f59cd7-44a7-4d88-a8bb-7108b70efa58"). InnerVolumeSpecName "marketplace-trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.772155 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6f195f7-8805-422e-b316-c57c71a27a38-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.772181 5003 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a3f59cd7-44a7-4d88-a8bb-7108b70efa58-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.773373 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6f195f7-8805-422e-b316-c57c71a27a38-kube-api-access-fqnqd" (OuterVolumeSpecName: "kube-api-access-fqnqd") pod "a6f195f7-8805-422e-b316-c57c71a27a38" (UID: "a6f195f7-8805-422e-b316-c57c71a27a38"). InnerVolumeSpecName "kube-api-access-fqnqd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.774925 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3f59cd7-44a7-4d88-a8bb-7108b70efa58-kube-api-access-4z7ql" (OuterVolumeSpecName: "kube-api-access-4z7ql") pod "a3f59cd7-44a7-4d88-a8bb-7108b70efa58" (UID: "a3f59cd7-44a7-4d88-a8bb-7108b70efa58"). InnerVolumeSpecName "kube-api-access-4z7ql". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.776387 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3f59cd7-44a7-4d88-a8bb-7108b70efa58-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "a3f59cd7-44a7-4d88-a8bb-7108b70efa58" (UID: "a3f59cd7-44a7-4d88-a8bb-7108b70efa58"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.822902 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a6f195f7-8805-422e-b316-c57c71a27a38-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a6f195f7-8805-422e-b316-c57c71a27a38" (UID: "a6f195f7-8805-422e-b316-c57c71a27a38"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.837885 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/056db06f-766d-4393-87b8-4148b3f4c3c9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "056db06f-766d-4393-87b8-4148b3f4c3c9" (UID: "056db06f-766d-4393-87b8-4148b3f4c3c9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.874152 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/056db06f-766d-4393-87b8-4148b3f4c3c9-utilities\") pod \"056db06f-766d-4393-87b8-4148b3f4c3c9\" (UID: \"056db06f-766d-4393-87b8-4148b3f4c3c9\") " Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.874300 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9ckvh\" (UniqueName: \"kubernetes.io/projected/056db06f-766d-4393-87b8-4148b3f4c3c9-kube-api-access-9ckvh\") pod \"056db06f-766d-4393-87b8-4148b3f4c3c9\" (UID: \"056db06f-766d-4393-87b8-4148b3f4c3c9\") " Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.874503 5003 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a3f59cd7-44a7-4d88-a8bb-7108b70efa58-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.874515 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqnqd\" (UniqueName: \"kubernetes.io/projected/a6f195f7-8805-422e-b316-c57c71a27a38-kube-api-access-fqnqd\") on node \"crc\" DevicePath \"\"" Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.874525 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/056db06f-766d-4393-87b8-4148b3f4c3c9-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.874535 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6f195f7-8805-422e-b316-c57c71a27a38-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.874543 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4z7ql\" (UniqueName: \"kubernetes.io/projected/a3f59cd7-44a7-4d88-a8bb-7108b70efa58-kube-api-access-4z7ql\") on node \"crc\" DevicePath \"\"" Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.875868 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/056db06f-766d-4393-87b8-4148b3f4c3c9-utilities" (OuterVolumeSpecName: "utilities") pod "056db06f-766d-4393-87b8-4148b3f4c3c9" (UID: "056db06f-766d-4393-87b8-4148b3f4c3c9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:47:57 crc kubenswrapper[5003]: I0126 10:47:57.877182 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/056db06f-766d-4393-87b8-4148b3f4c3c9-kube-api-access-9ckvh" (OuterVolumeSpecName: "kube-api-access-9ckvh") pod "056db06f-766d-4393-87b8-4148b3f4c3c9" (UID: "056db06f-766d-4393-87b8-4148b3f4c3c9"). InnerVolumeSpecName "kube-api-access-9ckvh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.007579 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/056db06f-766d-4393-87b8-4148b3f4c3c9-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.007751 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9ckvh\" (UniqueName: \"kubernetes.io/projected/056db06f-766d-4393-87b8-4148b3f4c3c9-kube-api-access-9ckvh\") on node \"crc\" DevicePath \"\"" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.190327 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qbjvl" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.208097 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-ljw7m" event={"ID":"4f101492-8469-482f-a258-7a3a4e9fade0","Type":"ContainerStarted","Data":"f4cb3d5e5146f17e823020a07637182e877f6f0a00e312c068fd2293ad826d35"} Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.208144 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-ljw7m" event={"ID":"4f101492-8469-482f-a258-7a3a4e9fade0","Type":"ContainerStarted","Data":"5fa9f4409277710c47130a1a638ff783ca2a30afc6eb8955491b859630fcdc9a"} Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.211238 5003 generic.go:334] "Generic (PLEG): container finished" podID="056db06f-766d-4393-87b8-4148b3f4c3c9" containerID="c05a9502523e8b8269e9eb3f5942a59087d4b779676213904e89cae6d867016d" exitCode=0 Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.211315 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4nzsj" event={"ID":"056db06f-766d-4393-87b8-4148b3f4c3c9","Type":"ContainerDied","Data":"c05a9502523e8b8269e9eb3f5942a59087d4b779676213904e89cae6d867016d"} Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.211335 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4nzsj" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.211357 5003 scope.go:117] "RemoveContainer" containerID="c05a9502523e8b8269e9eb3f5942a59087d4b779676213904e89cae6d867016d" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.211344 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4nzsj" event={"ID":"056db06f-766d-4393-87b8-4148b3f4c3c9","Type":"ContainerDied","Data":"7414fcc63b56498f28c3649fbf658944eea8991363a5e49e911474d6c8391fc8"} Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.214885 5003 generic.go:334] "Generic (PLEG): container finished" podID="36a0e821-b752-4299-a9ec-1c719bdf5b2c" containerID="d259242256f4f23244924df8049cf24aa283167e6f30e8710a41962afdd1a514" exitCode=0 Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.214928 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7j6tj" event={"ID":"36a0e821-b752-4299-a9ec-1c719bdf5b2c","Type":"ContainerDied","Data":"d259242256f4f23244924df8049cf24aa283167e6f30e8710a41962afdd1a514"} Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.220189 5003 generic.go:334] "Generic (PLEG): container finished" podID="a3f59cd7-44a7-4d88-a8bb-7108b70efa58" containerID="4277e048d62cdbbe31da4607945280b845f8cb2304a41ade0efc5a5a3ef134d4" exitCode=0 Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.220243 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-xsgg6" event={"ID":"a3f59cd7-44a7-4d88-a8bb-7108b70efa58","Type":"ContainerDied","Data":"4277e048d62cdbbe31da4607945280b845f8cb2304a41ade0efc5a5a3ef134d4"} Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.220268 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-xsgg6" event={"ID":"a3f59cd7-44a7-4d88-a8bb-7108b70efa58","Type":"ContainerDied","Data":"08740a078e7d6b3529d973c5e435c6979831e7499be250068ec54714b1c82fa5"} Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.220353 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-xsgg6" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.241496 5003 generic.go:334] "Generic (PLEG): container finished" podID="a6f195f7-8805-422e-b316-c57c71a27a38" containerID="f960fad4688dcbdabdd80b05e3504bccf43eb7a616f36b3c2a19b08c5fe26c0f" exitCode=0 Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.241668 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nlxw6" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.243466 5003 scope.go:117] "RemoveContainer" containerID="dd7b0b6150fee93bab970c1492263dc93e701ac83090c654db1af0112d19e7c9" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.243827 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nlxw6" event={"ID":"a6f195f7-8805-422e-b316-c57c71a27a38","Type":"ContainerDied","Data":"f960fad4688dcbdabdd80b05e3504bccf43eb7a616f36b3c2a19b08c5fe26c0f"} Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.243859 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nlxw6" event={"ID":"a6f195f7-8805-422e-b316-c57c71a27a38","Type":"ContainerDied","Data":"3976cb8b3ece0126119bba72cc9201d03e1e31330b7b015502fa12a93eeb560a"} Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.264310 5003 generic.go:334] "Generic (PLEG): container finished" podID="afdef7f7-32b5-4976-881a-398dc09ac9bd" containerID="1e51c1f396efa5116956df238e87b98a5777609f311f311b7fda93e3e1b9224c" exitCode=0 Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.264355 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qbjvl" event={"ID":"afdef7f7-32b5-4976-881a-398dc09ac9bd","Type":"ContainerDied","Data":"1e51c1f396efa5116956df238e87b98a5777609f311f311b7fda93e3e1b9224c"} Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.264384 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qbjvl" event={"ID":"afdef7f7-32b5-4976-881a-398dc09ac9bd","Type":"ContainerDied","Data":"57800e056704c775f80bb31762939c6e47027c2abe7b65f3e52f07507ed965c1"} Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.264457 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qbjvl" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.267127 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4nzsj"] Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.275038 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-4nzsj"] Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.296342 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nlxw6"] Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.300008 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-nlxw6"] Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.301436 5003 scope.go:117] "RemoveContainer" containerID="5355513c6d1dde2479d5b40b8d1cc96445f9241657ce3d356cb7d2420c7b0975" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.316179 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/afdef7f7-32b5-4976-881a-398dc09ac9bd-catalog-content\") pod \"afdef7f7-32b5-4976-881a-398dc09ac9bd\" (UID: \"afdef7f7-32b5-4976-881a-398dc09ac9bd\") " Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.316230 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/afdef7f7-32b5-4976-881a-398dc09ac9bd-utilities\") pod \"afdef7f7-32b5-4976-881a-398dc09ac9bd\" (UID: \"afdef7f7-32b5-4976-881a-398dc09ac9bd\") " Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.316323 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ksvrq\" (UniqueName: \"kubernetes.io/projected/afdef7f7-32b5-4976-881a-398dc09ac9bd-kube-api-access-ksvrq\") pod \"afdef7f7-32b5-4976-881a-398dc09ac9bd\" (UID: \"afdef7f7-32b5-4976-881a-398dc09ac9bd\") " Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.318598 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/afdef7f7-32b5-4976-881a-398dc09ac9bd-utilities" (OuterVolumeSpecName: "utilities") pod "afdef7f7-32b5-4976-881a-398dc09ac9bd" (UID: "afdef7f7-32b5-4976-881a-398dc09ac9bd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.319360 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xsgg6"] Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.322754 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xsgg6"] Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.330090 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afdef7f7-32b5-4976-881a-398dc09ac9bd-kube-api-access-ksvrq" (OuterVolumeSpecName: "kube-api-access-ksvrq") pod "afdef7f7-32b5-4976-881a-398dc09ac9bd" (UID: "afdef7f7-32b5-4976-881a-398dc09ac9bd"). InnerVolumeSpecName "kube-api-access-ksvrq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.342152 5003 scope.go:117] "RemoveContainer" containerID="c05a9502523e8b8269e9eb3f5942a59087d4b779676213904e89cae6d867016d" Jan 26 10:47:58 crc kubenswrapper[5003]: E0126 10:47:58.343084 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c05a9502523e8b8269e9eb3f5942a59087d4b779676213904e89cae6d867016d\": container with ID starting with c05a9502523e8b8269e9eb3f5942a59087d4b779676213904e89cae6d867016d not found: ID does not exist" containerID="c05a9502523e8b8269e9eb3f5942a59087d4b779676213904e89cae6d867016d" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.343122 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c05a9502523e8b8269e9eb3f5942a59087d4b779676213904e89cae6d867016d"} err="failed to get container status \"c05a9502523e8b8269e9eb3f5942a59087d4b779676213904e89cae6d867016d\": rpc error: code = NotFound desc = could not find container \"c05a9502523e8b8269e9eb3f5942a59087d4b779676213904e89cae6d867016d\": container with ID starting with c05a9502523e8b8269e9eb3f5942a59087d4b779676213904e89cae6d867016d not found: ID does not exist" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.343156 5003 scope.go:117] "RemoveContainer" containerID="dd7b0b6150fee93bab970c1492263dc93e701ac83090c654db1af0112d19e7c9" Jan 26 10:47:58 crc kubenswrapper[5003]: E0126 10:47:58.343398 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd7b0b6150fee93bab970c1492263dc93e701ac83090c654db1af0112d19e7c9\": container with ID starting with dd7b0b6150fee93bab970c1492263dc93e701ac83090c654db1af0112d19e7c9 not found: ID does not exist" containerID="dd7b0b6150fee93bab970c1492263dc93e701ac83090c654db1af0112d19e7c9" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.343432 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd7b0b6150fee93bab970c1492263dc93e701ac83090c654db1af0112d19e7c9"} err="failed to get container status \"dd7b0b6150fee93bab970c1492263dc93e701ac83090c654db1af0112d19e7c9\": rpc error: code = NotFound desc = could not find container \"dd7b0b6150fee93bab970c1492263dc93e701ac83090c654db1af0112d19e7c9\": container with ID starting with dd7b0b6150fee93bab970c1492263dc93e701ac83090c654db1af0112d19e7c9 not found: ID does not exist" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.343450 5003 scope.go:117] "RemoveContainer" containerID="5355513c6d1dde2479d5b40b8d1cc96445f9241657ce3d356cb7d2420c7b0975" Jan 26 10:47:58 crc kubenswrapper[5003]: E0126 10:47:58.343661 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5355513c6d1dde2479d5b40b8d1cc96445f9241657ce3d356cb7d2420c7b0975\": container with ID starting with 5355513c6d1dde2479d5b40b8d1cc96445f9241657ce3d356cb7d2420c7b0975 not found: ID does not exist" containerID="5355513c6d1dde2479d5b40b8d1cc96445f9241657ce3d356cb7d2420c7b0975" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.343686 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5355513c6d1dde2479d5b40b8d1cc96445f9241657ce3d356cb7d2420c7b0975"} err="failed to get container status \"5355513c6d1dde2479d5b40b8d1cc96445f9241657ce3d356cb7d2420c7b0975\": rpc error: code = NotFound desc = could not 
find container \"5355513c6d1dde2479d5b40b8d1cc96445f9241657ce3d356cb7d2420c7b0975\": container with ID starting with 5355513c6d1dde2479d5b40b8d1cc96445f9241657ce3d356cb7d2420c7b0975 not found: ID does not exist" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.343699 5003 scope.go:117] "RemoveContainer" containerID="4277e048d62cdbbe31da4607945280b845f8cb2304a41ade0efc5a5a3ef134d4" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.353720 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7j6tj" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.359497 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/afdef7f7-32b5-4976-881a-398dc09ac9bd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "afdef7f7-32b5-4976-881a-398dc09ac9bd" (UID: "afdef7f7-32b5-4976-881a-398dc09ac9bd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.379716 5003 scope.go:117] "RemoveContainer" containerID="4277e048d62cdbbe31da4607945280b845f8cb2304a41ade0efc5a5a3ef134d4" Jan 26 10:47:58 crc kubenswrapper[5003]: E0126 10:47:58.380777 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4277e048d62cdbbe31da4607945280b845f8cb2304a41ade0efc5a5a3ef134d4\": container with ID starting with 4277e048d62cdbbe31da4607945280b845f8cb2304a41ade0efc5a5a3ef134d4 not found: ID does not exist" containerID="4277e048d62cdbbe31da4607945280b845f8cb2304a41ade0efc5a5a3ef134d4" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.380890 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4277e048d62cdbbe31da4607945280b845f8cb2304a41ade0efc5a5a3ef134d4"} err="failed to get container status \"4277e048d62cdbbe31da4607945280b845f8cb2304a41ade0efc5a5a3ef134d4\": rpc error: code = NotFound desc = could not find container \"4277e048d62cdbbe31da4607945280b845f8cb2304a41ade0efc5a5a3ef134d4\": container with ID starting with 4277e048d62cdbbe31da4607945280b845f8cb2304a41ade0efc5a5a3ef134d4 not found: ID does not exist" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.381007 5003 scope.go:117] "RemoveContainer" containerID="f960fad4688dcbdabdd80b05e3504bccf43eb7a616f36b3c2a19b08c5fe26c0f" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.400059 5003 scope.go:117] "RemoveContainer" containerID="330c394ad929b6aa35e1e51ad90b232053d097ef196cef80686959bd0b283db5" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.417929 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/36a0e821-b752-4299-a9ec-1c719bdf5b2c-utilities\") pod \"36a0e821-b752-4299-a9ec-1c719bdf5b2c\" (UID: \"36a0e821-b752-4299-a9ec-1c719bdf5b2c\") " Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.418009 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8lbq9\" (UniqueName: \"kubernetes.io/projected/36a0e821-b752-4299-a9ec-1c719bdf5b2c-kube-api-access-8lbq9\") pod \"36a0e821-b752-4299-a9ec-1c719bdf5b2c\" (UID: \"36a0e821-b752-4299-a9ec-1c719bdf5b2c\") " Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.418050 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/36a0e821-b752-4299-a9ec-1c719bdf5b2c-catalog-content\") pod \"36a0e821-b752-4299-a9ec-1c719bdf5b2c\" (UID: \"36a0e821-b752-4299-a9ec-1c719bdf5b2c\") " Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.418310 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/afdef7f7-32b5-4976-881a-398dc09ac9bd-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.418329 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/afdef7f7-32b5-4976-881a-398dc09ac9bd-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.418340 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ksvrq\" (UniqueName: \"kubernetes.io/projected/afdef7f7-32b5-4976-881a-398dc09ac9bd-kube-api-access-ksvrq\") on node \"crc\" DevicePath \"\"" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.418924 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36a0e821-b752-4299-a9ec-1c719bdf5b2c-utilities" (OuterVolumeSpecName: "utilities") pod "36a0e821-b752-4299-a9ec-1c719bdf5b2c" (UID: "36a0e821-b752-4299-a9ec-1c719bdf5b2c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.421864 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36a0e821-b752-4299-a9ec-1c719bdf5b2c-kube-api-access-8lbq9" (OuterVolumeSpecName: "kube-api-access-8lbq9") pod "36a0e821-b752-4299-a9ec-1c719bdf5b2c" (UID: "36a0e821-b752-4299-a9ec-1c719bdf5b2c"). InnerVolumeSpecName "kube-api-access-8lbq9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.421958 5003 scope.go:117] "RemoveContainer" containerID="314896be1f8dedc9a636fa2c2264004319ce86437579786634353b0fb6d159d0" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.435607 5003 scope.go:117] "RemoveContainer" containerID="f960fad4688dcbdabdd80b05e3504bccf43eb7a616f36b3c2a19b08c5fe26c0f" Jan 26 10:47:58 crc kubenswrapper[5003]: E0126 10:47:58.436156 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f960fad4688dcbdabdd80b05e3504bccf43eb7a616f36b3c2a19b08c5fe26c0f\": container with ID starting with f960fad4688dcbdabdd80b05e3504bccf43eb7a616f36b3c2a19b08c5fe26c0f not found: ID does not exist" containerID="f960fad4688dcbdabdd80b05e3504bccf43eb7a616f36b3c2a19b08c5fe26c0f" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.436199 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f960fad4688dcbdabdd80b05e3504bccf43eb7a616f36b3c2a19b08c5fe26c0f"} err="failed to get container status \"f960fad4688dcbdabdd80b05e3504bccf43eb7a616f36b3c2a19b08c5fe26c0f\": rpc error: code = NotFound desc = could not find container \"f960fad4688dcbdabdd80b05e3504bccf43eb7a616f36b3c2a19b08c5fe26c0f\": container with ID starting with f960fad4688dcbdabdd80b05e3504bccf43eb7a616f36b3c2a19b08c5fe26c0f not found: ID does not exist" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.436232 5003 scope.go:117] "RemoveContainer" containerID="330c394ad929b6aa35e1e51ad90b232053d097ef196cef80686959bd0b283db5" Jan 26 10:47:58 crc kubenswrapper[5003]: E0126 10:47:58.436842 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"330c394ad929b6aa35e1e51ad90b232053d097ef196cef80686959bd0b283db5\": container with ID starting with 330c394ad929b6aa35e1e51ad90b232053d097ef196cef80686959bd0b283db5 not found: ID does not exist" containerID="330c394ad929b6aa35e1e51ad90b232053d097ef196cef80686959bd0b283db5" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.436881 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"330c394ad929b6aa35e1e51ad90b232053d097ef196cef80686959bd0b283db5"} err="failed to get container status \"330c394ad929b6aa35e1e51ad90b232053d097ef196cef80686959bd0b283db5\": rpc error: code = NotFound desc = could not find container \"330c394ad929b6aa35e1e51ad90b232053d097ef196cef80686959bd0b283db5\": container with ID starting with 330c394ad929b6aa35e1e51ad90b232053d097ef196cef80686959bd0b283db5 not found: ID does not exist" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.436902 5003 scope.go:117] "RemoveContainer" containerID="314896be1f8dedc9a636fa2c2264004319ce86437579786634353b0fb6d159d0" Jan 26 10:47:58 crc kubenswrapper[5003]: E0126 10:47:58.437151 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"314896be1f8dedc9a636fa2c2264004319ce86437579786634353b0fb6d159d0\": container with ID starting with 314896be1f8dedc9a636fa2c2264004319ce86437579786634353b0fb6d159d0 not found: ID does not exist" containerID="314896be1f8dedc9a636fa2c2264004319ce86437579786634353b0fb6d159d0" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.437204 5003 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"314896be1f8dedc9a636fa2c2264004319ce86437579786634353b0fb6d159d0"} err="failed to get container status \"314896be1f8dedc9a636fa2c2264004319ce86437579786634353b0fb6d159d0\": rpc error: code = NotFound desc = could not find container \"314896be1f8dedc9a636fa2c2264004319ce86437579786634353b0fb6d159d0\": container with ID starting with 314896be1f8dedc9a636fa2c2264004319ce86437579786634353b0fb6d159d0 not found: ID does not exist" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.437234 5003 scope.go:117] "RemoveContainer" containerID="1e51c1f396efa5116956df238e87b98a5777609f311f311b7fda93e3e1b9224c" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.449338 5003 scope.go:117] "RemoveContainer" containerID="dbdf537045f411dd088ecd751c8720269f397fcd101b424198e87387cce5a32f" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.466379 5003 scope.go:117] "RemoveContainer" containerID="a026257d96006df22efee5a32457f8f9fae3faefbe47fa2b681eded786111c8c" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.479888 5003 scope.go:117] "RemoveContainer" containerID="1e51c1f396efa5116956df238e87b98a5777609f311f311b7fda93e3e1b9224c" Jan 26 10:47:58 crc kubenswrapper[5003]: E0126 10:47:58.480346 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e51c1f396efa5116956df238e87b98a5777609f311f311b7fda93e3e1b9224c\": container with ID starting with 1e51c1f396efa5116956df238e87b98a5777609f311f311b7fda93e3e1b9224c not found: ID does not exist" containerID="1e51c1f396efa5116956df238e87b98a5777609f311f311b7fda93e3e1b9224c" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.480392 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e51c1f396efa5116956df238e87b98a5777609f311f311b7fda93e3e1b9224c"} err="failed to get container status \"1e51c1f396efa5116956df238e87b98a5777609f311f311b7fda93e3e1b9224c\": rpc error: code = NotFound desc = could not find container \"1e51c1f396efa5116956df238e87b98a5777609f311f311b7fda93e3e1b9224c\": container with ID starting with 1e51c1f396efa5116956df238e87b98a5777609f311f311b7fda93e3e1b9224c not found: ID does not exist" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.480423 5003 scope.go:117] "RemoveContainer" containerID="dbdf537045f411dd088ecd751c8720269f397fcd101b424198e87387cce5a32f" Jan 26 10:47:58 crc kubenswrapper[5003]: E0126 10:47:58.480772 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dbdf537045f411dd088ecd751c8720269f397fcd101b424198e87387cce5a32f\": container with ID starting with dbdf537045f411dd088ecd751c8720269f397fcd101b424198e87387cce5a32f not found: ID does not exist" containerID="dbdf537045f411dd088ecd751c8720269f397fcd101b424198e87387cce5a32f" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.480797 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dbdf537045f411dd088ecd751c8720269f397fcd101b424198e87387cce5a32f"} err="failed to get container status \"dbdf537045f411dd088ecd751c8720269f397fcd101b424198e87387cce5a32f\": rpc error: code = NotFound desc = could not find container \"dbdf537045f411dd088ecd751c8720269f397fcd101b424198e87387cce5a32f\": container with ID starting with dbdf537045f411dd088ecd751c8720269f397fcd101b424198e87387cce5a32f not found: ID does not exist" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.480811 5003 
scope.go:117] "RemoveContainer" containerID="a026257d96006df22efee5a32457f8f9fae3faefbe47fa2b681eded786111c8c" Jan 26 10:47:58 crc kubenswrapper[5003]: E0126 10:47:58.481090 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a026257d96006df22efee5a32457f8f9fae3faefbe47fa2b681eded786111c8c\": container with ID starting with a026257d96006df22efee5a32457f8f9fae3faefbe47fa2b681eded786111c8c not found: ID does not exist" containerID="a026257d96006df22efee5a32457f8f9fae3faefbe47fa2b681eded786111c8c" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.481135 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a026257d96006df22efee5a32457f8f9fae3faefbe47fa2b681eded786111c8c"} err="failed to get container status \"a026257d96006df22efee5a32457f8f9fae3faefbe47fa2b681eded786111c8c\": rpc error: code = NotFound desc = could not find container \"a026257d96006df22efee5a32457f8f9fae3faefbe47fa2b681eded786111c8c\": container with ID starting with a026257d96006df22efee5a32457f8f9fae3faefbe47fa2b681eded786111c8c not found: ID does not exist" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.519646 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/36a0e821-b752-4299-a9ec-1c719bdf5b2c-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.519677 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8lbq9\" (UniqueName: \"kubernetes.io/projected/36a0e821-b752-4299-a9ec-1c719bdf5b2c-kube-api-access-8lbq9\") on node \"crc\" DevicePath \"\"" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.536085 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36a0e821-b752-4299-a9ec-1c719bdf5b2c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "36a0e821-b752-4299-a9ec-1c719bdf5b2c" (UID: "36a0e821-b752-4299-a9ec-1c719bdf5b2c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.589323 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qbjvl"] Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.592991 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qbjvl"] Jan 26 10:47:58 crc kubenswrapper[5003]: I0126 10:47:58.621820 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/36a0e821-b752-4299-a9ec-1c719bdf5b2c-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 10:47:59 crc kubenswrapper[5003]: I0126 10:47:59.007508 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="056db06f-766d-4393-87b8-4148b3f4c3c9" path="/var/lib/kubelet/pods/056db06f-766d-4393-87b8-4148b3f4c3c9/volumes" Jan 26 10:47:59 crc kubenswrapper[5003]: I0126 10:47:59.008352 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3f59cd7-44a7-4d88-a8bb-7108b70efa58" path="/var/lib/kubelet/pods/a3f59cd7-44a7-4d88-a8bb-7108b70efa58/volumes" Jan 26 10:47:59 crc kubenswrapper[5003]: I0126 10:47:59.008915 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6f195f7-8805-422e-b316-c57c71a27a38" path="/var/lib/kubelet/pods/a6f195f7-8805-422e-b316-c57c71a27a38/volumes" Jan 26 10:47:59 crc kubenswrapper[5003]: I0126 10:47:59.010116 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="afdef7f7-32b5-4976-881a-398dc09ac9bd" path="/var/lib/kubelet/pods/afdef7f7-32b5-4976-881a-398dc09ac9bd/volumes" Jan 26 10:47:59 crc kubenswrapper[5003]: I0126 10:47:59.274635 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7j6tj" event={"ID":"36a0e821-b752-4299-a9ec-1c719bdf5b2c","Type":"ContainerDied","Data":"5e464cd11cdbaec2dbe473b4287a74275d512878028272992f6c68d53cb3edc1"} Jan 26 10:47:59 crc kubenswrapper[5003]: I0126 10:47:59.274667 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7j6tj" Jan 26 10:47:59 crc kubenswrapper[5003]: I0126 10:47:59.274723 5003 scope.go:117] "RemoveContainer" containerID="d259242256f4f23244924df8049cf24aa283167e6f30e8710a41962afdd1a514" Jan 26 10:47:59 crc kubenswrapper[5003]: I0126 10:47:59.275035 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-ljw7m" Jan 26 10:47:59 crc kubenswrapper[5003]: I0126 10:47:59.279010 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-ljw7m" Jan 26 10:47:59 crc kubenswrapper[5003]: I0126 10:47:59.290028 5003 scope.go:117] "RemoveContainer" containerID="61337665437faac32f969edb14e8efbfa5d1b813c7ed6ddb3c24669358d26eee" Jan 26 10:47:59 crc kubenswrapper[5003]: I0126 10:47:59.296414 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-ljw7m" podStartSLOduration=3.29639106 podStartE2EDuration="3.29639106s" podCreationTimestamp="2026-01-26 10:47:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:47:59.29267423 +0000 UTC m=+294.833899791" watchObservedRunningTime="2026-01-26 10:47:59.29639106 +0000 UTC m=+294.837616621" Jan 26 10:47:59 crc kubenswrapper[5003]: I0126 10:47:59.324046 5003 scope.go:117] "RemoveContainer" containerID="d914102da788edef2af168a55c3381d0e6c24fc8ca1bb32cef50fb16c14b4f9a" Jan 26 10:47:59 crc kubenswrapper[5003]: I0126 10:47:59.327903 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7j6tj"] Jan 26 10:47:59 crc kubenswrapper[5003]: I0126 10:47:59.332060 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-7j6tj"] Jan 26 10:48:01 crc kubenswrapper[5003]: I0126 10:48:01.010476 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36a0e821-b752-4299-a9ec-1c719bdf5b2c" path="/var/lib/kubelet/pods/36a0e821-b752-4299-a9ec-1c719bdf5b2c/volumes" Jan 26 10:48:04 crc kubenswrapper[5003]: I0126 10:48:04.853308 5003 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Jan 26 10:48:15 crc kubenswrapper[5003]: I0126 10:48:15.352042 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-sl8wp"] Jan 26 10:48:15 crc kubenswrapper[5003]: I0126 10:48:15.352749 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-sl8wp" podUID="6e70d335-0c77-41ee-a1a5-f0d4b7d28bea" containerName="controller-manager" containerID="cri-o://7ecb527799154610ba6ac5abe008a5794eaeb04db74a1aa3de8fa80d3828e859" gracePeriod=30 Jan 26 10:48:15 crc kubenswrapper[5003]: I0126 10:48:15.484656 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6w5n8"] Jan 26 10:48:15 crc kubenswrapper[5003]: I0126 10:48:15.485085 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6w5n8" podUID="96cf2336-9c93-48fe-8d61-c9618714c1b2" containerName="route-controller-manager" containerID="cri-o://c94829131eb8214b80972705e0a5a7e04ff7809cb8bda6f63b301bdc103a122b" 
gracePeriod=30 Jan 26 10:48:15 crc kubenswrapper[5003]: I0126 10:48:15.819073 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-sl8wp" Jan 26 10:48:15 crc kubenswrapper[5003]: I0126 10:48:15.897762 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6w5n8" Jan 26 10:48:15 crc kubenswrapper[5003]: I0126 10:48:15.950520 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6e70d335-0c77-41ee-a1a5-f0d4b7d28bea-client-ca\") pod \"6e70d335-0c77-41ee-a1a5-f0d4b7d28bea\" (UID: \"6e70d335-0c77-41ee-a1a5-f0d4b7d28bea\") " Jan 26 10:48:15 crc kubenswrapper[5003]: I0126 10:48:15.950620 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m4v8f\" (UniqueName: \"kubernetes.io/projected/6e70d335-0c77-41ee-a1a5-f0d4b7d28bea-kube-api-access-m4v8f\") pod \"6e70d335-0c77-41ee-a1a5-f0d4b7d28bea\" (UID: \"6e70d335-0c77-41ee-a1a5-f0d4b7d28bea\") " Jan 26 10:48:15 crc kubenswrapper[5003]: I0126 10:48:15.950645 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e70d335-0c77-41ee-a1a5-f0d4b7d28bea-config\") pod \"6e70d335-0c77-41ee-a1a5-f0d4b7d28bea\" (UID: \"6e70d335-0c77-41ee-a1a5-f0d4b7d28bea\") " Jan 26 10:48:15 crc kubenswrapper[5003]: I0126 10:48:15.950696 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6e70d335-0c77-41ee-a1a5-f0d4b7d28bea-serving-cert\") pod \"6e70d335-0c77-41ee-a1a5-f0d4b7d28bea\" (UID: \"6e70d335-0c77-41ee-a1a5-f0d4b7d28bea\") " Jan 26 10:48:15 crc kubenswrapper[5003]: I0126 10:48:15.950736 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6e70d335-0c77-41ee-a1a5-f0d4b7d28bea-proxy-ca-bundles\") pod \"6e70d335-0c77-41ee-a1a5-f0d4b7d28bea\" (UID: \"6e70d335-0c77-41ee-a1a5-f0d4b7d28bea\") " Jan 26 10:48:15 crc kubenswrapper[5003]: I0126 10:48:15.951988 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e70d335-0c77-41ee-a1a5-f0d4b7d28bea-client-ca" (OuterVolumeSpecName: "client-ca") pod "6e70d335-0c77-41ee-a1a5-f0d4b7d28bea" (UID: "6e70d335-0c77-41ee-a1a5-f0d4b7d28bea"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:48:15 crc kubenswrapper[5003]: I0126 10:48:15.952360 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e70d335-0c77-41ee-a1a5-f0d4b7d28bea-config" (OuterVolumeSpecName: "config") pod "6e70d335-0c77-41ee-a1a5-f0d4b7d28bea" (UID: "6e70d335-0c77-41ee-a1a5-f0d4b7d28bea"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:48:15 crc kubenswrapper[5003]: I0126 10:48:15.952734 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e70d335-0c77-41ee-a1a5-f0d4b7d28bea-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "6e70d335-0c77-41ee-a1a5-f0d4b7d28bea" (UID: "6e70d335-0c77-41ee-a1a5-f0d4b7d28bea"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:48:15 crc kubenswrapper[5003]: I0126 10:48:15.958294 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e70d335-0c77-41ee-a1a5-f0d4b7d28bea-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6e70d335-0c77-41ee-a1a5-f0d4b7d28bea" (UID: "6e70d335-0c77-41ee-a1a5-f0d4b7d28bea"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:48:15 crc kubenswrapper[5003]: I0126 10:48:15.960013 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e70d335-0c77-41ee-a1a5-f0d4b7d28bea-kube-api-access-m4v8f" (OuterVolumeSpecName: "kube-api-access-m4v8f") pod "6e70d335-0c77-41ee-a1a5-f0d4b7d28bea" (UID: "6e70d335-0c77-41ee-a1a5-f0d4b7d28bea"). InnerVolumeSpecName "kube-api-access-m4v8f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.051902 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96cf2336-9c93-48fe-8d61-c9618714c1b2-config\") pod \"96cf2336-9c93-48fe-8d61-c9618714c1b2\" (UID: \"96cf2336-9c93-48fe-8d61-c9618714c1b2\") " Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.051999 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dp7gz\" (UniqueName: \"kubernetes.io/projected/96cf2336-9c93-48fe-8d61-c9618714c1b2-kube-api-access-dp7gz\") pod \"96cf2336-9c93-48fe-8d61-c9618714c1b2\" (UID: \"96cf2336-9c93-48fe-8d61-c9618714c1b2\") " Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.052056 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/96cf2336-9c93-48fe-8d61-c9618714c1b2-serving-cert\") pod \"96cf2336-9c93-48fe-8d61-c9618714c1b2\" (UID: \"96cf2336-9c93-48fe-8d61-c9618714c1b2\") " Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.052105 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/96cf2336-9c93-48fe-8d61-c9618714c1b2-client-ca\") pod \"96cf2336-9c93-48fe-8d61-c9618714c1b2\" (UID: \"96cf2336-9c93-48fe-8d61-c9618714c1b2\") " Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.052359 5003 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6e70d335-0c77-41ee-a1a5-f0d4b7d28bea-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.052373 5003 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6e70d335-0c77-41ee-a1a5-f0d4b7d28bea-client-ca\") on node \"crc\" DevicePath \"\"" Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.052382 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m4v8f\" (UniqueName: \"kubernetes.io/projected/6e70d335-0c77-41ee-a1a5-f0d4b7d28bea-kube-api-access-m4v8f\") on node \"crc\" DevicePath \"\"" Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.052416 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e70d335-0c77-41ee-a1a5-f0d4b7d28bea-config\") on node \"crc\" DevicePath \"\"" Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.052425 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/6e70d335-0c77-41ee-a1a5-f0d4b7d28bea-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.053240 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96cf2336-9c93-48fe-8d61-c9618714c1b2-client-ca" (OuterVolumeSpecName: "client-ca") pod "96cf2336-9c93-48fe-8d61-c9618714c1b2" (UID: "96cf2336-9c93-48fe-8d61-c9618714c1b2"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.053781 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96cf2336-9c93-48fe-8d61-c9618714c1b2-config" (OuterVolumeSpecName: "config") pod "96cf2336-9c93-48fe-8d61-c9618714c1b2" (UID: "96cf2336-9c93-48fe-8d61-c9618714c1b2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.056941 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96cf2336-9c93-48fe-8d61-c9618714c1b2-kube-api-access-dp7gz" (OuterVolumeSpecName: "kube-api-access-dp7gz") pod "96cf2336-9c93-48fe-8d61-c9618714c1b2" (UID: "96cf2336-9c93-48fe-8d61-c9618714c1b2"). InnerVolumeSpecName "kube-api-access-dp7gz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.057655 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96cf2336-9c93-48fe-8d61-c9618714c1b2-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "96cf2336-9c93-48fe-8d61-c9618714c1b2" (UID: "96cf2336-9c93-48fe-8d61-c9618714c1b2"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.153792 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/96cf2336-9c93-48fe-8d61-c9618714c1b2-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.153841 5003 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/96cf2336-9c93-48fe-8d61-c9618714c1b2-client-ca\") on node \"crc\" DevicePath \"\"" Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.153854 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96cf2336-9c93-48fe-8d61-c9618714c1b2-config\") on node \"crc\" DevicePath \"\"" Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.153868 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dp7gz\" (UniqueName: \"kubernetes.io/projected/96cf2336-9c93-48fe-8d61-c9618714c1b2-kube-api-access-dp7gz\") on node \"crc\" DevicePath \"\"" Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.358911 5003 generic.go:334] "Generic (PLEG): container finished" podID="6e70d335-0c77-41ee-a1a5-f0d4b7d28bea" containerID="7ecb527799154610ba6ac5abe008a5794eaeb04db74a1aa3de8fa80d3828e859" exitCode=0 Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.358981 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-sl8wp" event={"ID":"6e70d335-0c77-41ee-a1a5-f0d4b7d28bea","Type":"ContainerDied","Data":"7ecb527799154610ba6ac5abe008a5794eaeb04db74a1aa3de8fa80d3828e859"} Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.359011 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-sl8wp" event={"ID":"6e70d335-0c77-41ee-a1a5-f0d4b7d28bea","Type":"ContainerDied","Data":"feeb13f786eb4e33ecb6e3ff5f9ff321f7450497ae2dd96be891e4668942f1e4"} Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.359030 5003 scope.go:117] "RemoveContainer" containerID="7ecb527799154610ba6ac5abe008a5794eaeb04db74a1aa3de8fa80d3828e859" Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.359040 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-sl8wp" Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.362242 5003 generic.go:334] "Generic (PLEG): container finished" podID="96cf2336-9c93-48fe-8d61-c9618714c1b2" containerID="c94829131eb8214b80972705e0a5a7e04ff7809cb8bda6f63b301bdc103a122b" exitCode=0 Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.362292 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6w5n8" Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.362296 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6w5n8" event={"ID":"96cf2336-9c93-48fe-8d61-c9618714c1b2","Type":"ContainerDied","Data":"c94829131eb8214b80972705e0a5a7e04ff7809cb8bda6f63b301bdc103a122b"} Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.362443 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6w5n8" event={"ID":"96cf2336-9c93-48fe-8d61-c9618714c1b2","Type":"ContainerDied","Data":"2755e6155bc15e72a54981e900c57edf9c870bdb8e94b9c73c5666ce98d746bc"} Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.389918 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-sl8wp"] Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.390387 5003 scope.go:117] "RemoveContainer" containerID="7ecb527799154610ba6ac5abe008a5794eaeb04db74a1aa3de8fa80d3828e859" Jan 26 10:48:16 crc kubenswrapper[5003]: E0126 10:48:16.390858 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ecb527799154610ba6ac5abe008a5794eaeb04db74a1aa3de8fa80d3828e859\": container with ID starting with 7ecb527799154610ba6ac5abe008a5794eaeb04db74a1aa3de8fa80d3828e859 not found: ID does not exist" containerID="7ecb527799154610ba6ac5abe008a5794eaeb04db74a1aa3de8fa80d3828e859" Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.390886 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ecb527799154610ba6ac5abe008a5794eaeb04db74a1aa3de8fa80d3828e859"} err="failed to get container status \"7ecb527799154610ba6ac5abe008a5794eaeb04db74a1aa3de8fa80d3828e859\": rpc error: code = NotFound desc = could not find container \"7ecb527799154610ba6ac5abe008a5794eaeb04db74a1aa3de8fa80d3828e859\": container with ID starting with 7ecb527799154610ba6ac5abe008a5794eaeb04db74a1aa3de8fa80d3828e859 not found: ID does not exist" Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.390905 5003 scope.go:117] "RemoveContainer" containerID="c94829131eb8214b80972705e0a5a7e04ff7809cb8bda6f63b301bdc103a122b" Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.396761 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-sl8wp"] Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.399773 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6w5n8"] Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.402415 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6w5n8"] Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.407102 5003 scope.go:117] "RemoveContainer" containerID="c94829131eb8214b80972705e0a5a7e04ff7809cb8bda6f63b301bdc103a122b" Jan 26 10:48:16 crc kubenswrapper[5003]: E0126 10:48:16.407658 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c94829131eb8214b80972705e0a5a7e04ff7809cb8bda6f63b301bdc103a122b\": container with ID starting with c94829131eb8214b80972705e0a5a7e04ff7809cb8bda6f63b301bdc103a122b not found: ID 
does not exist" containerID="c94829131eb8214b80972705e0a5a7e04ff7809cb8bda6f63b301bdc103a122b" Jan 26 10:48:16 crc kubenswrapper[5003]: I0126 10:48:16.407699 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c94829131eb8214b80972705e0a5a7e04ff7809cb8bda6f63b301bdc103a122b"} err="failed to get container status \"c94829131eb8214b80972705e0a5a7e04ff7809cb8bda6f63b301bdc103a122b\": rpc error: code = NotFound desc = could not find container \"c94829131eb8214b80972705e0a5a7e04ff7809cb8bda6f63b301bdc103a122b\": container with ID starting with c94829131eb8214b80972705e0a5a7e04ff7809cb8bda6f63b301bdc103a122b not found: ID does not exist" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.008967 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e70d335-0c77-41ee-a1a5-f0d4b7d28bea" path="/var/lib/kubelet/pods/6e70d335-0c77-41ee-a1a5-f0d4b7d28bea/volumes" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.009810 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96cf2336-9c93-48fe-8d61-c9618714c1b2" path="/var/lib/kubelet/pods/96cf2336-9c93-48fe-8d61-c9618714c1b2/volumes" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.345391 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p"] Jan 26 10:48:17 crc kubenswrapper[5003]: E0126 10:48:17.345744 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36a0e821-b752-4299-a9ec-1c719bdf5b2c" containerName="extract-content" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.345767 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="36a0e821-b752-4299-a9ec-1c719bdf5b2c" containerName="extract-content" Jan 26 10:48:17 crc kubenswrapper[5003]: E0126 10:48:17.345789 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="056db06f-766d-4393-87b8-4148b3f4c3c9" containerName="extract-content" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.345801 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="056db06f-766d-4393-87b8-4148b3f4c3c9" containerName="extract-content" Jan 26 10:48:17 crc kubenswrapper[5003]: E0126 10:48:17.345822 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afdef7f7-32b5-4976-881a-398dc09ac9bd" containerName="extract-utilities" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.345836 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="afdef7f7-32b5-4976-881a-398dc09ac9bd" containerName="extract-utilities" Jan 26 10:48:17 crc kubenswrapper[5003]: E0126 10:48:17.345853 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="056db06f-766d-4393-87b8-4148b3f4c3c9" containerName="registry-server" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.345868 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="056db06f-766d-4393-87b8-4148b3f4c3c9" containerName="registry-server" Jan 26 10:48:17 crc kubenswrapper[5003]: E0126 10:48:17.345889 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="056db06f-766d-4393-87b8-4148b3f4c3c9" containerName="extract-utilities" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.345903 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="056db06f-766d-4393-87b8-4148b3f4c3c9" containerName="extract-utilities" Jan 26 10:48:17 crc kubenswrapper[5003]: E0126 10:48:17.345918 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6f195f7-8805-422e-b316-c57c71a27a38" 
containerName="registry-server" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.345931 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6f195f7-8805-422e-b316-c57c71a27a38" containerName="registry-server" Jan 26 10:48:17 crc kubenswrapper[5003]: E0126 10:48:17.345953 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3f59cd7-44a7-4d88-a8bb-7108b70efa58" containerName="marketplace-operator" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.345965 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3f59cd7-44a7-4d88-a8bb-7108b70efa58" containerName="marketplace-operator" Jan 26 10:48:17 crc kubenswrapper[5003]: E0126 10:48:17.345981 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e70d335-0c77-41ee-a1a5-f0d4b7d28bea" containerName="controller-manager" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.345995 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e70d335-0c77-41ee-a1a5-f0d4b7d28bea" containerName="controller-manager" Jan 26 10:48:17 crc kubenswrapper[5003]: E0126 10:48:17.346012 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afdef7f7-32b5-4976-881a-398dc09ac9bd" containerName="extract-content" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.346025 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="afdef7f7-32b5-4976-881a-398dc09ac9bd" containerName="extract-content" Jan 26 10:48:17 crc kubenswrapper[5003]: E0126 10:48:17.346043 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6f195f7-8805-422e-b316-c57c71a27a38" containerName="extract-utilities" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.346056 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6f195f7-8805-422e-b316-c57c71a27a38" containerName="extract-utilities" Jan 26 10:48:17 crc kubenswrapper[5003]: E0126 10:48:17.346078 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36a0e821-b752-4299-a9ec-1c719bdf5b2c" containerName="extract-utilities" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.346092 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="36a0e821-b752-4299-a9ec-1c719bdf5b2c" containerName="extract-utilities" Jan 26 10:48:17 crc kubenswrapper[5003]: E0126 10:48:17.346106 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afdef7f7-32b5-4976-881a-398dc09ac9bd" containerName="registry-server" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.346118 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="afdef7f7-32b5-4976-881a-398dc09ac9bd" containerName="registry-server" Jan 26 10:48:17 crc kubenswrapper[5003]: E0126 10:48:17.346136 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96cf2336-9c93-48fe-8d61-c9618714c1b2" containerName="route-controller-manager" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.346151 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="96cf2336-9c93-48fe-8d61-c9618714c1b2" containerName="route-controller-manager" Jan 26 10:48:17 crc kubenswrapper[5003]: E0126 10:48:17.346168 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6f195f7-8805-422e-b316-c57c71a27a38" containerName="extract-content" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.346181 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6f195f7-8805-422e-b316-c57c71a27a38" containerName="extract-content" Jan 26 10:48:17 crc kubenswrapper[5003]: E0126 10:48:17.346204 5003 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="36a0e821-b752-4299-a9ec-1c719bdf5b2c" containerName="registry-server" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.346217 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="36a0e821-b752-4299-a9ec-1c719bdf5b2c" containerName="registry-server" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.346405 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3f59cd7-44a7-4d88-a8bb-7108b70efa58" containerName="marketplace-operator" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.346437 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="36a0e821-b752-4299-a9ec-1c719bdf5b2c" containerName="registry-server" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.346457 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="056db06f-766d-4393-87b8-4148b3f4c3c9" containerName="registry-server" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.346473 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="96cf2336-9c93-48fe-8d61-c9618714c1b2" containerName="route-controller-manager" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.346490 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e70d335-0c77-41ee-a1a5-f0d4b7d28bea" containerName="controller-manager" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.346504 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6f195f7-8805-422e-b316-c57c71a27a38" containerName="registry-server" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.346522 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="afdef7f7-32b5-4976-881a-398dc09ac9bd" containerName="registry-server" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.347098 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.353182 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.356603 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.356724 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.356956 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.357201 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.357471 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.362199 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.413954 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-774d86fbb9-9p9qf"] Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.415377 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-774d86fbb9-9p9qf" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.416243 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p"] Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.420086 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.420830 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.421544 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.421719 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.421882 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.422493 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.422948 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-774d86fbb9-9p9qf"] Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.468176 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6329ddc6-f735-4be0-b070-86af8cc71df7-proxy-ca-bundles\") pod \"controller-manager-7db8c4c9f8-5ln9p\" (UID: \"6329ddc6-f735-4be0-b070-86af8cc71df7\") " pod="openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.468271 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6329ddc6-f735-4be0-b070-86af8cc71df7-config\") pod \"controller-manager-7db8c4c9f8-5ln9p\" (UID: \"6329ddc6-f735-4be0-b070-86af8cc71df7\") " pod="openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.468427 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tz4sc\" (UniqueName: \"kubernetes.io/projected/6329ddc6-f735-4be0-b070-86af8cc71df7-kube-api-access-tz4sc\") pod \"controller-manager-7db8c4c9f8-5ln9p\" (UID: \"6329ddc6-f735-4be0-b070-86af8cc71df7\") " pod="openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.468475 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6329ddc6-f735-4be0-b070-86af8cc71df7-serving-cert\") pod \"controller-manager-7db8c4c9f8-5ln9p\" (UID: \"6329ddc6-f735-4be0-b070-86af8cc71df7\") " pod="openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.468543 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6329ddc6-f735-4be0-b070-86af8cc71df7-client-ca\") pod \"controller-manager-7db8c4c9f8-5ln9p\" (UID: \"6329ddc6-f735-4be0-b070-86af8cc71df7\") " pod="openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.569613 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e9175efa-12f5-44f7-ab78-0a0ead34d311-client-ca\") pod \"route-controller-manager-774d86fbb9-9p9qf\" (UID: \"e9175efa-12f5-44f7-ab78-0a0ead34d311\") " pod="openshift-route-controller-manager/route-controller-manager-774d86fbb9-9p9qf" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.569656 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6329ddc6-f735-4be0-b070-86af8cc71df7-config\") pod \"controller-manager-7db8c4c9f8-5ln9p\" (UID: \"6329ddc6-f735-4be0-b070-86af8cc71df7\") " pod="openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.569685 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tz4sc\" (UniqueName: \"kubernetes.io/projected/6329ddc6-f735-4be0-b070-86af8cc71df7-kube-api-access-tz4sc\") pod \"controller-manager-7db8c4c9f8-5ln9p\" (UID: \"6329ddc6-f735-4be0-b070-86af8cc71df7\") " pod="openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.569703 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6329ddc6-f735-4be0-b070-86af8cc71df7-serving-cert\") pod \"controller-manager-7db8c4c9f8-5ln9p\" (UID: \"6329ddc6-f735-4be0-b070-86af8cc71df7\") " pod="openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.569721 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e9175efa-12f5-44f7-ab78-0a0ead34d311-serving-cert\") pod \"route-controller-manager-774d86fbb9-9p9qf\" (UID: \"e9175efa-12f5-44f7-ab78-0a0ead34d311\") " pod="openshift-route-controller-manager/route-controller-manager-774d86fbb9-9p9qf" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.569750 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6329ddc6-f735-4be0-b070-86af8cc71df7-client-ca\") pod \"controller-manager-7db8c4c9f8-5ln9p\" (UID: \"6329ddc6-f735-4be0-b070-86af8cc71df7\") " pod="openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.569782 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9175efa-12f5-44f7-ab78-0a0ead34d311-config\") pod \"route-controller-manager-774d86fbb9-9p9qf\" (UID: \"e9175efa-12f5-44f7-ab78-0a0ead34d311\") " pod="openshift-route-controller-manager/route-controller-manager-774d86fbb9-9p9qf" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.569804 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6329ddc6-f735-4be0-b070-86af8cc71df7-proxy-ca-bundles\") 
pod \"controller-manager-7db8c4c9f8-5ln9p\" (UID: \"6329ddc6-f735-4be0-b070-86af8cc71df7\") " pod="openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.569825 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cngvc\" (UniqueName: \"kubernetes.io/projected/e9175efa-12f5-44f7-ab78-0a0ead34d311-kube-api-access-cngvc\") pod \"route-controller-manager-774d86fbb9-9p9qf\" (UID: \"e9175efa-12f5-44f7-ab78-0a0ead34d311\") " pod="openshift-route-controller-manager/route-controller-manager-774d86fbb9-9p9qf" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.570860 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6329ddc6-f735-4be0-b070-86af8cc71df7-client-ca\") pod \"controller-manager-7db8c4c9f8-5ln9p\" (UID: \"6329ddc6-f735-4be0-b070-86af8cc71df7\") " pod="openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.570914 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6329ddc6-f735-4be0-b070-86af8cc71df7-proxy-ca-bundles\") pod \"controller-manager-7db8c4c9f8-5ln9p\" (UID: \"6329ddc6-f735-4be0-b070-86af8cc71df7\") " pod="openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.571162 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6329ddc6-f735-4be0-b070-86af8cc71df7-config\") pod \"controller-manager-7db8c4c9f8-5ln9p\" (UID: \"6329ddc6-f735-4be0-b070-86af8cc71df7\") " pod="openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.576196 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6329ddc6-f735-4be0-b070-86af8cc71df7-serving-cert\") pod \"controller-manager-7db8c4c9f8-5ln9p\" (UID: \"6329ddc6-f735-4be0-b070-86af8cc71df7\") " pod="openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.588886 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tz4sc\" (UniqueName: \"kubernetes.io/projected/6329ddc6-f735-4be0-b070-86af8cc71df7-kube-api-access-tz4sc\") pod \"controller-manager-7db8c4c9f8-5ln9p\" (UID: \"6329ddc6-f735-4be0-b070-86af8cc71df7\") " pod="openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.670909 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e9175efa-12f5-44f7-ab78-0a0ead34d311-client-ca\") pod \"route-controller-manager-774d86fbb9-9p9qf\" (UID: \"e9175efa-12f5-44f7-ab78-0a0ead34d311\") " pod="openshift-route-controller-manager/route-controller-manager-774d86fbb9-9p9qf" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.670965 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e9175efa-12f5-44f7-ab78-0a0ead34d311-serving-cert\") pod \"route-controller-manager-774d86fbb9-9p9qf\" (UID: \"e9175efa-12f5-44f7-ab78-0a0ead34d311\") " 
pod="openshift-route-controller-manager/route-controller-manager-774d86fbb9-9p9qf" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.671006 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9175efa-12f5-44f7-ab78-0a0ead34d311-config\") pod \"route-controller-manager-774d86fbb9-9p9qf\" (UID: \"e9175efa-12f5-44f7-ab78-0a0ead34d311\") " pod="openshift-route-controller-manager/route-controller-manager-774d86fbb9-9p9qf" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.671034 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cngvc\" (UniqueName: \"kubernetes.io/projected/e9175efa-12f5-44f7-ab78-0a0ead34d311-kube-api-access-cngvc\") pod \"route-controller-manager-774d86fbb9-9p9qf\" (UID: \"e9175efa-12f5-44f7-ab78-0a0ead34d311\") " pod="openshift-route-controller-manager/route-controller-manager-774d86fbb9-9p9qf" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.672315 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9175efa-12f5-44f7-ab78-0a0ead34d311-config\") pod \"route-controller-manager-774d86fbb9-9p9qf\" (UID: \"e9175efa-12f5-44f7-ab78-0a0ead34d311\") " pod="openshift-route-controller-manager/route-controller-manager-774d86fbb9-9p9qf" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.672344 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e9175efa-12f5-44f7-ab78-0a0ead34d311-client-ca\") pod \"route-controller-manager-774d86fbb9-9p9qf\" (UID: \"e9175efa-12f5-44f7-ab78-0a0ead34d311\") " pod="openshift-route-controller-manager/route-controller-manager-774d86fbb9-9p9qf" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.675102 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e9175efa-12f5-44f7-ab78-0a0ead34d311-serving-cert\") pod \"route-controller-manager-774d86fbb9-9p9qf\" (UID: \"e9175efa-12f5-44f7-ab78-0a0ead34d311\") " pod="openshift-route-controller-manager/route-controller-manager-774d86fbb9-9p9qf" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.685830 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cngvc\" (UniqueName: \"kubernetes.io/projected/e9175efa-12f5-44f7-ab78-0a0ead34d311-kube-api-access-cngvc\") pod \"route-controller-manager-774d86fbb9-9p9qf\" (UID: \"e9175efa-12f5-44f7-ab78-0a0ead34d311\") " pod="openshift-route-controller-manager/route-controller-manager-774d86fbb9-9p9qf" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.724226 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p" Jan 26 10:48:17 crc kubenswrapper[5003]: I0126 10:48:17.733797 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-774d86fbb9-9p9qf" Jan 26 10:48:18 crc kubenswrapper[5003]: I0126 10:48:18.128097 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p"] Jan 26 10:48:18 crc kubenswrapper[5003]: I0126 10:48:18.172240 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-774d86fbb9-9p9qf"] Jan 26 10:48:18 crc kubenswrapper[5003]: W0126 10:48:18.177148 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode9175efa_12f5_44f7_ab78_0a0ead34d311.slice/crio-eab2808a684860b1bcce0c49504aff044a5285a0914d5b3e47e2f2d2baf09c94 WatchSource:0}: Error finding container eab2808a684860b1bcce0c49504aff044a5285a0914d5b3e47e2f2d2baf09c94: Status 404 returned error can't find the container with id eab2808a684860b1bcce0c49504aff044a5285a0914d5b3e47e2f2d2baf09c94 Jan 26 10:48:18 crc kubenswrapper[5003]: I0126 10:48:18.374865 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p" event={"ID":"6329ddc6-f735-4be0-b070-86af8cc71df7","Type":"ContainerStarted","Data":"bd180edaa42a7ab61d4fd2111ba9ad69712e68f0b22d87ceec09fea95166d959"} Jan 26 10:48:18 crc kubenswrapper[5003]: I0126 10:48:18.374916 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p" event={"ID":"6329ddc6-f735-4be0-b070-86af8cc71df7","Type":"ContainerStarted","Data":"8a20e0519e77c0980019f45f6f6c3bc97a8b025c5f5999f622ab138289d144be"} Jan 26 10:48:18 crc kubenswrapper[5003]: I0126 10:48:18.375847 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p" Jan 26 10:48:18 crc kubenswrapper[5003]: I0126 10:48:18.378762 5003 patch_prober.go:28] interesting pod/controller-manager-7db8c4c9f8-5ln9p container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.58:8443/healthz\": dial tcp 10.217.0.58:8443: connect: connection refused" start-of-body= Jan 26 10:48:18 crc kubenswrapper[5003]: I0126 10:48:18.378816 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p" podUID="6329ddc6-f735-4be0-b070-86af8cc71df7" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.58:8443/healthz\": dial tcp 10.217.0.58:8443: connect: connection refused" Jan 26 10:48:18 crc kubenswrapper[5003]: I0126 10:48:18.384768 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-774d86fbb9-9p9qf" event={"ID":"e9175efa-12f5-44f7-ab78-0a0ead34d311","Type":"ContainerStarted","Data":"6f521d1f4ad6ecf9f4d574c2b1a3b0c00440c68c6eb38c8535d16e2fb5e21aaf"} Jan 26 10:48:18 crc kubenswrapper[5003]: I0126 10:48:18.384812 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-774d86fbb9-9p9qf" event={"ID":"e9175efa-12f5-44f7-ab78-0a0ead34d311","Type":"ContainerStarted","Data":"eab2808a684860b1bcce0c49504aff044a5285a0914d5b3e47e2f2d2baf09c94"} Jan 26 10:48:18 crc kubenswrapper[5003]: I0126 10:48:18.385705 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-route-controller-manager/route-controller-manager-774d86fbb9-9p9qf" Jan 26 10:48:18 crc kubenswrapper[5003]: I0126 10:48:18.391832 5003 patch_prober.go:28] interesting pod/route-controller-manager-774d86fbb9-9p9qf container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.60:8443/healthz\": dial tcp 10.217.0.60:8443: connect: connection refused" start-of-body= Jan 26 10:48:18 crc kubenswrapper[5003]: I0126 10:48:18.391906 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-774d86fbb9-9p9qf" podUID="e9175efa-12f5-44f7-ab78-0a0ead34d311" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.60:8443/healthz\": dial tcp 10.217.0.60:8443: connect: connection refused" Jan 26 10:48:18 crc kubenswrapper[5003]: I0126 10:48:18.413157 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-774d86fbb9-9p9qf" podStartSLOduration=3.413140975 podStartE2EDuration="3.413140975s" podCreationTimestamp="2026-01-26 10:48:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:48:18.412428942 +0000 UTC m=+313.953654523" watchObservedRunningTime="2026-01-26 10:48:18.413140975 +0000 UTC m=+313.954366536" Jan 26 10:48:18 crc kubenswrapper[5003]: I0126 10:48:18.413732 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p" podStartSLOduration=3.413726824 podStartE2EDuration="3.413726824s" podCreationTimestamp="2026-01-26 10:48:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:48:18.394671168 +0000 UTC m=+313.935896729" watchObservedRunningTime="2026-01-26 10:48:18.413726824 +0000 UTC m=+313.954952385" Jan 26 10:48:19 crc kubenswrapper[5003]: I0126 10:48:19.393443 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p" Jan 26 10:48:19 crc kubenswrapper[5003]: I0126 10:48:19.394317 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-774d86fbb9-9p9qf" Jan 26 10:48:55 crc kubenswrapper[5003]: I0126 10:48:55.338077 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p"] Jan 26 10:48:55 crc kubenswrapper[5003]: I0126 10:48:55.338697 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p" podUID="6329ddc6-f735-4be0-b070-86af8cc71df7" containerName="controller-manager" containerID="cri-o://bd180edaa42a7ab61d4fd2111ba9ad69712e68f0b22d87ceec09fea95166d959" gracePeriod=30 Jan 26 10:48:55 crc kubenswrapper[5003]: I0126 10:48:55.569445 5003 generic.go:334] "Generic (PLEG): container finished" podID="6329ddc6-f735-4be0-b070-86af8cc71df7" containerID="bd180edaa42a7ab61d4fd2111ba9ad69712e68f0b22d87ceec09fea95166d959" exitCode=0 Jan 26 10:48:55 crc kubenswrapper[5003]: I0126 10:48:55.569549 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p" 
event={"ID":"6329ddc6-f735-4be0-b070-86af8cc71df7","Type":"ContainerDied","Data":"bd180edaa42a7ab61d4fd2111ba9ad69712e68f0b22d87ceec09fea95166d959"} Jan 26 10:48:55 crc kubenswrapper[5003]: I0126 10:48:55.719098 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p" Jan 26 10:48:55 crc kubenswrapper[5003]: I0126 10:48:55.838078 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6329ddc6-f735-4be0-b070-86af8cc71df7-serving-cert\") pod \"6329ddc6-f735-4be0-b070-86af8cc71df7\" (UID: \"6329ddc6-f735-4be0-b070-86af8cc71df7\") " Jan 26 10:48:55 crc kubenswrapper[5003]: I0126 10:48:55.838165 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6329ddc6-f735-4be0-b070-86af8cc71df7-proxy-ca-bundles\") pod \"6329ddc6-f735-4be0-b070-86af8cc71df7\" (UID: \"6329ddc6-f735-4be0-b070-86af8cc71df7\") " Jan 26 10:48:55 crc kubenswrapper[5003]: I0126 10:48:55.838184 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tz4sc\" (UniqueName: \"kubernetes.io/projected/6329ddc6-f735-4be0-b070-86af8cc71df7-kube-api-access-tz4sc\") pod \"6329ddc6-f735-4be0-b070-86af8cc71df7\" (UID: \"6329ddc6-f735-4be0-b070-86af8cc71df7\") " Jan 26 10:48:55 crc kubenswrapper[5003]: I0126 10:48:55.838229 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6329ddc6-f735-4be0-b070-86af8cc71df7-client-ca\") pod \"6329ddc6-f735-4be0-b070-86af8cc71df7\" (UID: \"6329ddc6-f735-4be0-b070-86af8cc71df7\") " Jan 26 10:48:55 crc kubenswrapper[5003]: I0126 10:48:55.838257 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6329ddc6-f735-4be0-b070-86af8cc71df7-config\") pod \"6329ddc6-f735-4be0-b070-86af8cc71df7\" (UID: \"6329ddc6-f735-4be0-b070-86af8cc71df7\") " Jan 26 10:48:55 crc kubenswrapper[5003]: I0126 10:48:55.839190 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6329ddc6-f735-4be0-b070-86af8cc71df7-config" (OuterVolumeSpecName: "config") pod "6329ddc6-f735-4be0-b070-86af8cc71df7" (UID: "6329ddc6-f735-4be0-b070-86af8cc71df7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:48:55 crc kubenswrapper[5003]: I0126 10:48:55.839677 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6329ddc6-f735-4be0-b070-86af8cc71df7-client-ca" (OuterVolumeSpecName: "client-ca") pod "6329ddc6-f735-4be0-b070-86af8cc71df7" (UID: "6329ddc6-f735-4be0-b070-86af8cc71df7"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:48:55 crc kubenswrapper[5003]: I0126 10:48:55.839960 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6329ddc6-f735-4be0-b070-86af8cc71df7-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "6329ddc6-f735-4be0-b070-86af8cc71df7" (UID: "6329ddc6-f735-4be0-b070-86af8cc71df7"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:48:55 crc kubenswrapper[5003]: I0126 10:48:55.845191 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6329ddc6-f735-4be0-b070-86af8cc71df7-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6329ddc6-f735-4be0-b070-86af8cc71df7" (UID: "6329ddc6-f735-4be0-b070-86af8cc71df7"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:48:55 crc kubenswrapper[5003]: I0126 10:48:55.846040 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6329ddc6-f735-4be0-b070-86af8cc71df7-kube-api-access-tz4sc" (OuterVolumeSpecName: "kube-api-access-tz4sc") pod "6329ddc6-f735-4be0-b070-86af8cc71df7" (UID: "6329ddc6-f735-4be0-b070-86af8cc71df7"). InnerVolumeSpecName "kube-api-access-tz4sc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:48:55 crc kubenswrapper[5003]: I0126 10:48:55.940021 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6329ddc6-f735-4be0-b070-86af8cc71df7-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 10:48:55 crc kubenswrapper[5003]: I0126 10:48:55.940072 5003 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6329ddc6-f735-4be0-b070-86af8cc71df7-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 26 10:48:55 crc kubenswrapper[5003]: I0126 10:48:55.940088 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tz4sc\" (UniqueName: \"kubernetes.io/projected/6329ddc6-f735-4be0-b070-86af8cc71df7-kube-api-access-tz4sc\") on node \"crc\" DevicePath \"\"" Jan 26 10:48:55 crc kubenswrapper[5003]: I0126 10:48:55.940100 5003 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6329ddc6-f735-4be0-b070-86af8cc71df7-client-ca\") on node \"crc\" DevicePath \"\"" Jan 26 10:48:55 crc kubenswrapper[5003]: I0126 10:48:55.940112 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6329ddc6-f735-4be0-b070-86af8cc71df7-config\") on node \"crc\" DevicePath \"\"" Jan 26 10:48:56 crc kubenswrapper[5003]: I0126 10:48:56.579338 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p" event={"ID":"6329ddc6-f735-4be0-b070-86af8cc71df7","Type":"ContainerDied","Data":"8a20e0519e77c0980019f45f6f6c3bc97a8b025c5f5999f622ab138289d144be"} Jan 26 10:48:56 crc kubenswrapper[5003]: I0126 10:48:56.579418 5003 util.go:48] "No ready sandbox for pod can be found. 
Jan 26 10:48:56 crc kubenswrapper[5003]: I0126 10:48:56.579437 5003 scope.go:117] "RemoveContainer" containerID="bd180edaa42a7ab61d4fd2111ba9ad69712e68f0b22d87ceec09fea95166d959"
Jan 26 10:48:56 crc kubenswrapper[5003]: I0126 10:48:56.629140 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p"]
Jan 26 10:48:56 crc kubenswrapper[5003]: I0126 10:48:56.633795 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7db8c4c9f8-5ln9p"]
Jan 26 10:48:57 crc kubenswrapper[5003]: I0126 10:48:57.009545 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6329ddc6-f735-4be0-b070-86af8cc71df7" path="/var/lib/kubelet/pods/6329ddc6-f735-4be0-b070-86af8cc71df7/volumes"
Jan 26 10:48:57 crc kubenswrapper[5003]: I0126 10:48:57.363306 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-ffd997-fgx5x"]
Jan 26 10:48:57 crc kubenswrapper[5003]: E0126 10:48:57.363556 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6329ddc6-f735-4be0-b070-86af8cc71df7" containerName="controller-manager"
Jan 26 10:48:57 crc kubenswrapper[5003]: I0126 10:48:57.363569 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="6329ddc6-f735-4be0-b070-86af8cc71df7" containerName="controller-manager"
Jan 26 10:48:57 crc kubenswrapper[5003]: I0126 10:48:57.363672 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="6329ddc6-f735-4be0-b070-86af8cc71df7" containerName="controller-manager"
Jan 26 10:48:57 crc kubenswrapper[5003]: I0126 10:48:57.364071 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-ffd997-fgx5x"
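The cpu_manager, memory_manager, and state_mem entries fire while the replacement pod is admitted: the resource managers notice per-container state still keyed to the deleted pod's UID (6329ddc6-...) and drop it. A minimal sketch of that bookkeeping, assuming a simple map keyed by pod UID and container name (the real managers use checkpointed state files):

```go
// stale_state_sketch.go - sketch of the RemoveStaleState bookkeeping above:
// resource assignments keyed by (podUID, containerName) are deleted once the
// owning pod is gone. The types are illustrative only.
package main

import "fmt"

type key struct{ podUID, container string }

type stateMem struct {
	assignments map[key]string // e.g. CPU set strings like "0-3"
}

// removeStaleState deletes assignments for pods that are no longer active,
// matching the "Deleted CPUSet assignment" entries in the log.
func (s *stateMem) removeStaleState(activePods map[string]bool) {
	for k := range s.assignments {
		if !activePods[k.podUID] {
			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n", k.podUID, k.container)
			delete(s.assignments, k)
		}
	}
}

func main() {
	s := &stateMem{assignments: map[key]string{
		{"6329ddc6-f735-4be0-b070-86af8cc71df7", "controller-manager"}: "0-3",
	}}
	s.removeStaleState(map[string]bool{}) // the old pod is gone
}
```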
Jan 26 10:48:57 crc kubenswrapper[5003]: I0126 10:48:57.380928 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Jan 26 10:48:57 crc kubenswrapper[5003]: I0126 10:48:57.380990 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Jan 26 10:48:57 crc kubenswrapper[5003]: I0126 10:48:57.380937 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Jan 26 10:48:57 crc kubenswrapper[5003]: I0126 10:48:57.381039 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Jan 26 10:48:57 crc kubenswrapper[5003]: I0126 10:48:57.381489 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Jan 26 10:48:57 crc kubenswrapper[5003]: I0126 10:48:57.383351 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Jan 26 10:48:57 crc kubenswrapper[5003]: I0126 10:48:57.390000 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Jan 26 10:48:57 crc kubenswrapper[5003]: I0126 10:48:57.427634 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-ffd997-fgx5x"]
Jan 26 10:48:57 crc kubenswrapper[5003]: I0126 10:48:57.458248 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/33796c27-91d8-4720-98ff-f15a78e1e522-serving-cert\") pod \"controller-manager-ffd997-fgx5x\" (UID: \"33796c27-91d8-4720-98ff-f15a78e1e522\") " pod="openshift-controller-manager/controller-manager-ffd997-fgx5x"
Jan 26 10:48:57 crc kubenswrapper[5003]: I0126 10:48:57.458342 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/33796c27-91d8-4720-98ff-f15a78e1e522-client-ca\") pod \"controller-manager-ffd997-fgx5x\" (UID: \"33796c27-91d8-4720-98ff-f15a78e1e522\") " pod="openshift-controller-manager/controller-manager-ffd997-fgx5x"
Jan 26 10:48:57 crc kubenswrapper[5003]: I0126 10:48:57.458410 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/33796c27-91d8-4720-98ff-f15a78e1e522-proxy-ca-bundles\") pod \"controller-manager-ffd997-fgx5x\" (UID: \"33796c27-91d8-4720-98ff-f15a78e1e522\") " pod="openshift-controller-manager/controller-manager-ffd997-fgx5x"
Jan 26 10:48:57 crc kubenswrapper[5003]: I0126 10:48:57.458439 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7nwg\" (UniqueName: \"kubernetes.io/projected/33796c27-91d8-4720-98ff-f15a78e1e522-kube-api-access-v7nwg\") pod \"controller-manager-ffd997-fgx5x\" (UID: \"33796c27-91d8-4720-98ff-f15a78e1e522\") " pod="openshift-controller-manager/controller-manager-ffd997-fgx5x"
Jan 26 10:48:57 crc kubenswrapper[5003]: I0126 10:48:57.458470 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33796c27-91d8-4720-98ff-f15a78e1e522-config\") pod \"controller-manager-ffd997-fgx5x\" (UID: \"33796c27-91d8-4720-98ff-f15a78e1e522\") " pod="openshift-controller-manager/controller-manager-ffd997-fgx5x"
Jan 26 10:48:57 crc kubenswrapper[5003]: I0126 10:48:57.558827 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33796c27-91d8-4720-98ff-f15a78e1e522-config\") pod \"controller-manager-ffd997-fgx5x\" (UID: \"33796c27-91d8-4720-98ff-f15a78e1e522\") " pod="openshift-controller-manager/controller-manager-ffd997-fgx5x"
Jan 26 10:48:57 crc kubenswrapper[5003]: I0126 10:48:57.559100 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/33796c27-91d8-4720-98ff-f15a78e1e522-serving-cert\") pod \"controller-manager-ffd997-fgx5x\" (UID: \"33796c27-91d8-4720-98ff-f15a78e1e522\") " pod="openshift-controller-manager/controller-manager-ffd997-fgx5x"
Jan 26 10:48:57 crc kubenswrapper[5003]: I0126 10:48:57.559229 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/33796c27-91d8-4720-98ff-f15a78e1e522-client-ca\") pod \"controller-manager-ffd997-fgx5x\" (UID: \"33796c27-91d8-4720-98ff-f15a78e1e522\") " pod="openshift-controller-manager/controller-manager-ffd997-fgx5x"
Jan 26 10:48:57 crc kubenswrapper[5003]: I0126 10:48:57.559384 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/33796c27-91d8-4720-98ff-f15a78e1e522-proxy-ca-bundles\") pod \"controller-manager-ffd997-fgx5x\" (UID: \"33796c27-91d8-4720-98ff-f15a78e1e522\") " pod="openshift-controller-manager/controller-manager-ffd997-fgx5x"
Jan 26 10:48:57 crc kubenswrapper[5003]: I0126 10:48:57.559497 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v7nwg\" (UniqueName: \"kubernetes.io/projected/33796c27-91d8-4720-98ff-f15a78e1e522-kube-api-access-v7nwg\") pod \"controller-manager-ffd997-fgx5x\" (UID: \"33796c27-91d8-4720-98ff-f15a78e1e522\") " pod="openshift-controller-manager/controller-manager-ffd997-fgx5x"
Jan 26 10:48:57 crc kubenswrapper[5003]: I0126 10:48:57.560252 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/33796c27-91d8-4720-98ff-f15a78e1e522-client-ca\") pod \"controller-manager-ffd997-fgx5x\" (UID: \"33796c27-91d8-4720-98ff-f15a78e1e522\") " pod="openshift-controller-manager/controller-manager-ffd997-fgx5x"
Jan 26 10:48:57 crc kubenswrapper[5003]: I0126 10:48:57.561026 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/33796c27-91d8-4720-98ff-f15a78e1e522-proxy-ca-bundles\") pod \"controller-manager-ffd997-fgx5x\" (UID: \"33796c27-91d8-4720-98ff-f15a78e1e522\") " pod="openshift-controller-manager/controller-manager-ffd997-fgx5x"
Jan 26 10:48:57 crc kubenswrapper[5003]: I0126 10:48:57.561187 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33796c27-91d8-4720-98ff-f15a78e1e522-config\") pod \"controller-manager-ffd997-fgx5x\" (UID: \"33796c27-91d8-4720-98ff-f15a78e1e522\") " pod="openshift-controller-manager/controller-manager-ffd997-fgx5x"
Jan 26 10:48:57 crc kubenswrapper[5003]: I0126 10:48:57.570896 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/33796c27-91d8-4720-98ff-f15a78e1e522-serving-cert\") pod \"controller-manager-ffd997-fgx5x\" (UID: \"33796c27-91d8-4720-98ff-f15a78e1e522\") " pod="openshift-controller-manager/controller-manager-ffd997-fgx5x"
Jan 26 10:48:57 crc kubenswrapper[5003]: I0126 10:48:57.583191 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7nwg\" (UniqueName: \"kubernetes.io/projected/33796c27-91d8-4720-98ff-f15a78e1e522-kube-api-access-v7nwg\") pod \"controller-manager-ffd997-fgx5x\" (UID: \"33796c27-91d8-4720-98ff-f15a78e1e522\") " pod="openshift-controller-manager/controller-manager-ffd997-fgx5x"
Jan 26 10:48:57 crc kubenswrapper[5003]: I0126 10:48:57.688541 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-ffd997-fgx5x"
Jan 26 10:48:58 crc kubenswrapper[5003]: I0126 10:48:58.139257 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-ffd997-fgx5x"]
Jan 26 10:48:58 crc kubenswrapper[5003]: I0126 10:48:58.591563 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-ffd997-fgx5x" event={"ID":"33796c27-91d8-4720-98ff-f15a78e1e522","Type":"ContainerStarted","Data":"213c8f79b1c4275ab7a96f6310561ed068eb2c6230561e418a8efb722b0c302f"}
Jan 26 10:48:58 crc kubenswrapper[5003]: I0126 10:48:58.591775 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-ffd997-fgx5x" event={"ID":"33796c27-91d8-4720-98ff-f15a78e1e522","Type":"ContainerStarted","Data":"36147a391156a1d13b927894473d3a04bdb724e0406a0ed639dd419f1fc744c4"}
Jan 26 10:48:58 crc kubenswrapper[5003]: I0126 10:48:58.591934 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-ffd997-fgx5x"
Jan 26 10:48:58 crc kubenswrapper[5003]: I0126 10:48:58.597940 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-ffd997-fgx5x"
Jan 26 10:48:58 crc kubenswrapper[5003]: I0126 10:48:58.609813 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-ffd997-fgx5x" podStartSLOduration=3.609794828 podStartE2EDuration="3.609794828s" podCreationTimestamp="2026-01-26 10:48:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:48:58.607405694 +0000 UTC m=+354.148631275" watchObservedRunningTime="2026-01-26 10:48:58.609794828 +0000 UTC m=+354.151020389"
Jan 26 10:49:09 crc kubenswrapper[5003]: I0126 10:49:09.040138 5003 patch_prober.go:28] interesting pod/machine-config-daemon-m84kp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 26 10:49:09 crc kubenswrapper[5003]: I0126 10:49:09.040839 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
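The reconciler_common.go entries above trace the volume manager's reconcile loop for the new pod: each desired volume is first verified as attached (VerifyControllerAttachedVolume), then mounted (MountVolume started, followed by MountVolume.SetUp succeeded). A sketch of that desired-versus-actual progression with invented types; the real reconciler lives in the kubelet's volumemanager package and drives many more states:

```go
// reconcile_sketch.go - sketch of the desired/actual volume reconciliation
// pattern in the reconciler_common.go entries above. Types and names are
// invented for illustration only.
package main

import "fmt"

type volumeState int

const (
	unattached volumeState = iota
	attached
	mounted
)

type volume struct {
	name  string
	state volumeState
}

// reconcile drives each desired volume toward "mounted", mirroring the
// VerifyControllerAttachedVolume -> MountVolume.SetUp progression in the log.
func reconcile(desired []*volume) {
	for _, v := range desired {
		switch v.state {
		case unattached:
			fmt.Printf("VerifyControllerAttachedVolume started for volume %q\n", v.name)
			v.state = attached
		case attached:
			fmt.Printf("MountVolume started for volume %q\n", v.name)
			v.state = mounted
			fmt.Printf("MountVolume.SetUp succeeded for volume %q\n", v.name)
		}
	}
}

func main() {
	vols := []*volume{{name: "config"}, {name: "serving-cert"}, {name: "kube-api-access-v7nwg"}}
	reconcile(vols) // first pass: verify attachment
	reconcile(vols) // second pass: mount
}
```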
refused" Jan 26 10:49:09 crc kubenswrapper[5003]: I0126 10:49:09.485710 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-68rxt"] Jan 26 10:49:09 crc kubenswrapper[5003]: I0126 10:49:09.486349 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-68rxt" Jan 26 10:49:09 crc kubenswrapper[5003]: I0126 10:49:09.504611 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-68rxt"] Jan 26 10:49:09 crc kubenswrapper[5003]: I0126 10:49:09.611676 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6282e329-76b4-4475-8323-acb219451ca0-trusted-ca\") pod \"image-registry-66df7c8f76-68rxt\" (UID: \"6282e329-76b4-4475-8323-acb219451ca0\") " pod="openshift-image-registry/image-registry-66df7c8f76-68rxt" Jan 26 10:49:09 crc kubenswrapper[5003]: I0126 10:49:09.611718 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-68rxt\" (UID: \"6282e329-76b4-4475-8323-acb219451ca0\") " pod="openshift-image-registry/image-registry-66df7c8f76-68rxt" Jan 26 10:49:09 crc kubenswrapper[5003]: I0126 10:49:09.611743 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6282e329-76b4-4475-8323-acb219451ca0-bound-sa-token\") pod \"image-registry-66df7c8f76-68rxt\" (UID: \"6282e329-76b4-4475-8323-acb219451ca0\") " pod="openshift-image-registry/image-registry-66df7c8f76-68rxt" Jan 26 10:49:09 crc kubenswrapper[5003]: I0126 10:49:09.611930 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/6282e329-76b4-4475-8323-acb219451ca0-installation-pull-secrets\") pod \"image-registry-66df7c8f76-68rxt\" (UID: \"6282e329-76b4-4475-8323-acb219451ca0\") " pod="openshift-image-registry/image-registry-66df7c8f76-68rxt" Jan 26 10:49:09 crc kubenswrapper[5003]: I0126 10:49:09.612040 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/6282e329-76b4-4475-8323-acb219451ca0-registry-certificates\") pod \"image-registry-66df7c8f76-68rxt\" (UID: \"6282e329-76b4-4475-8323-acb219451ca0\") " pod="openshift-image-registry/image-registry-66df7c8f76-68rxt" Jan 26 10:49:09 crc kubenswrapper[5003]: I0126 10:49:09.612112 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/6282e329-76b4-4475-8323-acb219451ca0-ca-trust-extracted\") pod \"image-registry-66df7c8f76-68rxt\" (UID: \"6282e329-76b4-4475-8323-acb219451ca0\") " pod="openshift-image-registry/image-registry-66df7c8f76-68rxt" Jan 26 10:49:09 crc kubenswrapper[5003]: I0126 10:49:09.612328 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/6282e329-76b4-4475-8323-acb219451ca0-registry-tls\") pod \"image-registry-66df7c8f76-68rxt\" (UID: 
\"6282e329-76b4-4475-8323-acb219451ca0\") " pod="openshift-image-registry/image-registry-66df7c8f76-68rxt" Jan 26 10:49:09 crc kubenswrapper[5003]: I0126 10:49:09.612399 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tsgjn\" (UniqueName: \"kubernetes.io/projected/6282e329-76b4-4475-8323-acb219451ca0-kube-api-access-tsgjn\") pod \"image-registry-66df7c8f76-68rxt\" (UID: \"6282e329-76b4-4475-8323-acb219451ca0\") " pod="openshift-image-registry/image-registry-66df7c8f76-68rxt" Jan 26 10:49:09 crc kubenswrapper[5003]: I0126 10:49:09.628315 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-68rxt\" (UID: \"6282e329-76b4-4475-8323-acb219451ca0\") " pod="openshift-image-registry/image-registry-66df7c8f76-68rxt" Jan 26 10:49:09 crc kubenswrapper[5003]: I0126 10:49:09.713336 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6282e329-76b4-4475-8323-acb219451ca0-bound-sa-token\") pod \"image-registry-66df7c8f76-68rxt\" (UID: \"6282e329-76b4-4475-8323-acb219451ca0\") " pod="openshift-image-registry/image-registry-66df7c8f76-68rxt" Jan 26 10:49:09 crc kubenswrapper[5003]: I0126 10:49:09.713408 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/6282e329-76b4-4475-8323-acb219451ca0-installation-pull-secrets\") pod \"image-registry-66df7c8f76-68rxt\" (UID: \"6282e329-76b4-4475-8323-acb219451ca0\") " pod="openshift-image-registry/image-registry-66df7c8f76-68rxt" Jan 26 10:49:09 crc kubenswrapper[5003]: I0126 10:49:09.713432 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/6282e329-76b4-4475-8323-acb219451ca0-registry-certificates\") pod \"image-registry-66df7c8f76-68rxt\" (UID: \"6282e329-76b4-4475-8323-acb219451ca0\") " pod="openshift-image-registry/image-registry-66df7c8f76-68rxt" Jan 26 10:49:09 crc kubenswrapper[5003]: I0126 10:49:09.713448 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/6282e329-76b4-4475-8323-acb219451ca0-ca-trust-extracted\") pod \"image-registry-66df7c8f76-68rxt\" (UID: \"6282e329-76b4-4475-8323-acb219451ca0\") " pod="openshift-image-registry/image-registry-66df7c8f76-68rxt" Jan 26 10:49:09 crc kubenswrapper[5003]: I0126 10:49:09.713485 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/6282e329-76b4-4475-8323-acb219451ca0-registry-tls\") pod \"image-registry-66df7c8f76-68rxt\" (UID: \"6282e329-76b4-4475-8323-acb219451ca0\") " pod="openshift-image-registry/image-registry-66df7c8f76-68rxt" Jan 26 10:49:09 crc kubenswrapper[5003]: I0126 10:49:09.713505 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tsgjn\" (UniqueName: \"kubernetes.io/projected/6282e329-76b4-4475-8323-acb219451ca0-kube-api-access-tsgjn\") pod \"image-registry-66df7c8f76-68rxt\" (UID: \"6282e329-76b4-4475-8323-acb219451ca0\") " pod="openshift-image-registry/image-registry-66df7c8f76-68rxt" Jan 26 10:49:09 crc kubenswrapper[5003]: I0126 
10:49:09.713521 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6282e329-76b4-4475-8323-acb219451ca0-trusted-ca\") pod \"image-registry-66df7c8f76-68rxt\" (UID: \"6282e329-76b4-4475-8323-acb219451ca0\") " pod="openshift-image-registry/image-registry-66df7c8f76-68rxt" Jan 26 10:49:09 crc kubenswrapper[5003]: I0126 10:49:09.714443 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/6282e329-76b4-4475-8323-acb219451ca0-ca-trust-extracted\") pod \"image-registry-66df7c8f76-68rxt\" (UID: \"6282e329-76b4-4475-8323-acb219451ca0\") " pod="openshift-image-registry/image-registry-66df7c8f76-68rxt" Jan 26 10:49:09 crc kubenswrapper[5003]: I0126 10:49:09.715188 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6282e329-76b4-4475-8323-acb219451ca0-trusted-ca\") pod \"image-registry-66df7c8f76-68rxt\" (UID: \"6282e329-76b4-4475-8323-acb219451ca0\") " pod="openshift-image-registry/image-registry-66df7c8f76-68rxt" Jan 26 10:49:09 crc kubenswrapper[5003]: I0126 10:49:09.715245 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/6282e329-76b4-4475-8323-acb219451ca0-registry-certificates\") pod \"image-registry-66df7c8f76-68rxt\" (UID: \"6282e329-76b4-4475-8323-acb219451ca0\") " pod="openshift-image-registry/image-registry-66df7c8f76-68rxt" Jan 26 10:49:09 crc kubenswrapper[5003]: I0126 10:49:09.719401 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/6282e329-76b4-4475-8323-acb219451ca0-registry-tls\") pod \"image-registry-66df7c8f76-68rxt\" (UID: \"6282e329-76b4-4475-8323-acb219451ca0\") " pod="openshift-image-registry/image-registry-66df7c8f76-68rxt" Jan 26 10:49:09 crc kubenswrapper[5003]: I0126 10:49:09.719572 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/6282e329-76b4-4475-8323-acb219451ca0-installation-pull-secrets\") pod \"image-registry-66df7c8f76-68rxt\" (UID: \"6282e329-76b4-4475-8323-acb219451ca0\") " pod="openshift-image-registry/image-registry-66df7c8f76-68rxt" Jan 26 10:49:09 crc kubenswrapper[5003]: I0126 10:49:09.731788 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6282e329-76b4-4475-8323-acb219451ca0-bound-sa-token\") pod \"image-registry-66df7c8f76-68rxt\" (UID: \"6282e329-76b4-4475-8323-acb219451ca0\") " pod="openshift-image-registry/image-registry-66df7c8f76-68rxt" Jan 26 10:49:09 crc kubenswrapper[5003]: I0126 10:49:09.736629 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tsgjn\" (UniqueName: \"kubernetes.io/projected/6282e329-76b4-4475-8323-acb219451ca0-kube-api-access-tsgjn\") pod \"image-registry-66df7c8f76-68rxt\" (UID: \"6282e329-76b4-4475-8323-acb219451ca0\") " pod="openshift-image-registry/image-registry-66df7c8f76-68rxt" Jan 26 10:49:09 crc kubenswrapper[5003]: I0126 10:49:09.805071 5003 util.go:30] "No sandbox for pod can be found. 
Jan 26 10:49:10 crc kubenswrapper[5003]: I0126 10:49:10.201880 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-68rxt"]
Jan 26 10:49:10 crc kubenswrapper[5003]: W0126 10:49:10.217115 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6282e329_76b4_4475_8323_acb219451ca0.slice/crio-8b6eb19d3de88f0bdf5a67818a9e353f48080ac992f2cf9d8ea1574150825f1a WatchSource:0}: Error finding container 8b6eb19d3de88f0bdf5a67818a9e353f48080ac992f2cf9d8ea1574150825f1a: Status 404 returned error can't find the container with id 8b6eb19d3de88f0bdf5a67818a9e353f48080ac992f2cf9d8ea1574150825f1a
Jan 26 10:49:10 crc kubenswrapper[5003]: I0126 10:49:10.629521 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zccvn"]
Jan 26 10:49:10 crc kubenswrapper[5003]: I0126 10:49:10.630901 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zccvn"
Jan 26 10:49:10 crc kubenswrapper[5003]: I0126 10:49:10.632689 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zccvn"]
Jan 26 10:49:10 crc kubenswrapper[5003]: I0126 10:49:10.633044 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Jan 26 10:49:10 crc kubenswrapper[5003]: I0126 10:49:10.660539 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-68rxt" event={"ID":"6282e329-76b4-4475-8323-acb219451ca0","Type":"ContainerStarted","Data":"fa901cb542dd6205f7bb923c983deb3e54d72babca85221e470d827e973fcd8b"}
Jan 26 10:49:10 crc kubenswrapper[5003]: I0126 10:49:10.660599 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-68rxt" event={"ID":"6282e329-76b4-4475-8323-acb219451ca0","Type":"ContainerStarted","Data":"8b6eb19d3de88f0bdf5a67818a9e353f48080ac992f2cf9d8ea1574150825f1a"}
Jan 26 10:49:10 crc kubenswrapper[5003]: I0126 10:49:10.660695 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-68rxt"
Jan 26 10:49:10 crc kubenswrapper[5003]: I0126 10:49:10.690480 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-68rxt" podStartSLOduration=1.690464264 podStartE2EDuration="1.690464264s" podCreationTimestamp="2026-01-26 10:49:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:49:10.687868734 +0000 UTC m=+366.229094295" watchObservedRunningTime="2026-01-26 10:49:10.690464264 +0000 UTC m=+366.231689825"
Jan 26 10:49:10 crc kubenswrapper[5003]: I0126 10:49:10.727795 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfe4c3b2-3353-4091-bcdc-a63f51c76cfd-utilities\") pod \"community-operators-zccvn\" (UID: \"dfe4c3b2-3353-4091-bcdc-a63f51c76cfd\") " pod="openshift-marketplace/community-operators-zccvn"
Jan 26 10:49:10 crc kubenswrapper[5003]: I0126 10:49:10.727842 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfe4c3b2-3353-4091-bcdc-a63f51c76cfd-catalog-content\") pod \"community-operators-zccvn\" (UID: \"dfe4c3b2-3353-4091-bcdc-a63f51c76cfd\") " pod="openshift-marketplace/community-operators-zccvn"
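The W-level manager.go:1169 warning above is cAdvisor losing a race during pod churn: a cgroup watch event arrives for crio-8b6eb19d..., but the lookup behind it returns 404 because the container is not (or no longer) known at that instant; the same ID then shows up normally in the ContainerStarted events that follow, so these warnings appear transient. A sketch of tolerating that kind of race, with invented names:

```go
// watch_race_sketch.go - sketch of dropping a watch event whose container
// has already vanished (or is not yet registered), as in the manager.go:1169
// warnings above. Names are illustrative, not cAdvisor's API.
package main

import (
	"errors"
	"fmt"
)

var errNotFound = errors.New("can't find the container with id")

// lookupContainer is a hypothetical stand-in for resolving the container
// behind the cgroup named in the watch event.
func lookupContainer(id string) error { return errNotFound }

func processWatchEvent(cgroupName string) error {
	if err := lookupContainer(cgroupName); err != nil {
		if errors.Is(err, errNotFound) {
			// Container vanished between the event and the lookup; warn and move on.
			fmt.Printf("Failed to process watch event %s: %v (ignored)\n", cgroupName, err)
			return nil
		}
		return err
	}
	return nil
}

func main() {
	_ = processWatchEvent("crio-8b6eb19d3de88f0bdf5a67818a9e353f48080ac992f2cf9d8ea1574150825f1a")
}
```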
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfe4c3b2-3353-4091-bcdc-a63f51c76cfd-catalog-content\") pod \"community-operators-zccvn\" (UID: \"dfe4c3b2-3353-4091-bcdc-a63f51c76cfd\") " pod="openshift-marketplace/community-operators-zccvn" Jan 26 10:49:10 crc kubenswrapper[5003]: I0126 10:49:10.727924 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-smxxv\" (UniqueName: \"kubernetes.io/projected/dfe4c3b2-3353-4091-bcdc-a63f51c76cfd-kube-api-access-smxxv\") pod \"community-operators-zccvn\" (UID: \"dfe4c3b2-3353-4091-bcdc-a63f51c76cfd\") " pod="openshift-marketplace/community-operators-zccvn" Jan 26 10:49:10 crc kubenswrapper[5003]: I0126 10:49:10.818735 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5pkzq"] Jan 26 10:49:10 crc kubenswrapper[5003]: I0126 10:49:10.819936 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5pkzq" Jan 26 10:49:10 crc kubenswrapper[5003]: I0126 10:49:10.821889 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 26 10:49:10 crc kubenswrapper[5003]: I0126 10:49:10.828742 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-smxxv\" (UniqueName: \"kubernetes.io/projected/dfe4c3b2-3353-4091-bcdc-a63f51c76cfd-kube-api-access-smxxv\") pod \"community-operators-zccvn\" (UID: \"dfe4c3b2-3353-4091-bcdc-a63f51c76cfd\") " pod="openshift-marketplace/community-operators-zccvn" Jan 26 10:49:10 crc kubenswrapper[5003]: I0126 10:49:10.828826 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfe4c3b2-3353-4091-bcdc-a63f51c76cfd-catalog-content\") pod \"community-operators-zccvn\" (UID: \"dfe4c3b2-3353-4091-bcdc-a63f51c76cfd\") " pod="openshift-marketplace/community-operators-zccvn" Jan 26 10:49:10 crc kubenswrapper[5003]: I0126 10:49:10.828845 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfe4c3b2-3353-4091-bcdc-a63f51c76cfd-utilities\") pod \"community-operators-zccvn\" (UID: \"dfe4c3b2-3353-4091-bcdc-a63f51c76cfd\") " pod="openshift-marketplace/community-operators-zccvn" Jan 26 10:49:10 crc kubenswrapper[5003]: I0126 10:49:10.829296 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfe4c3b2-3353-4091-bcdc-a63f51c76cfd-utilities\") pod \"community-operators-zccvn\" (UID: \"dfe4c3b2-3353-4091-bcdc-a63f51c76cfd\") " pod="openshift-marketplace/community-operators-zccvn" Jan 26 10:49:10 crc kubenswrapper[5003]: I0126 10:49:10.829796 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfe4c3b2-3353-4091-bcdc-a63f51c76cfd-catalog-content\") pod \"community-operators-zccvn\" (UID: \"dfe4c3b2-3353-4091-bcdc-a63f51c76cfd\") " pod="openshift-marketplace/community-operators-zccvn" Jan 26 10:49:10 crc kubenswrapper[5003]: I0126 10:49:10.832878 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5pkzq"] Jan 26 10:49:10 crc kubenswrapper[5003]: I0126 10:49:10.856366 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-smxxv\" (UniqueName: 
\"kubernetes.io/projected/dfe4c3b2-3353-4091-bcdc-a63f51c76cfd-kube-api-access-smxxv\") pod \"community-operators-zccvn\" (UID: \"dfe4c3b2-3353-4091-bcdc-a63f51c76cfd\") " pod="openshift-marketplace/community-operators-zccvn" Jan 26 10:49:10 crc kubenswrapper[5003]: I0126 10:49:10.930552 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6g7j4\" (UniqueName: \"kubernetes.io/projected/656e647b-438f-442f-bc26-b92b57b3b76e-kube-api-access-6g7j4\") pod \"certified-operators-5pkzq\" (UID: \"656e647b-438f-442f-bc26-b92b57b3b76e\") " pod="openshift-marketplace/certified-operators-5pkzq" Jan 26 10:49:10 crc kubenswrapper[5003]: I0126 10:49:10.930639 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/656e647b-438f-442f-bc26-b92b57b3b76e-utilities\") pod \"certified-operators-5pkzq\" (UID: \"656e647b-438f-442f-bc26-b92b57b3b76e\") " pod="openshift-marketplace/certified-operators-5pkzq" Jan 26 10:49:11 crc kubenswrapper[5003]: I0126 10:49:10.930880 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/656e647b-438f-442f-bc26-b92b57b3b76e-catalog-content\") pod \"certified-operators-5pkzq\" (UID: \"656e647b-438f-442f-bc26-b92b57b3b76e\") " pod="openshift-marketplace/certified-operators-5pkzq" Jan 26 10:49:11 crc kubenswrapper[5003]: I0126 10:49:10.975129 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zccvn" Jan 26 10:49:11 crc kubenswrapper[5003]: I0126 10:49:11.032703 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6g7j4\" (UniqueName: \"kubernetes.io/projected/656e647b-438f-442f-bc26-b92b57b3b76e-kube-api-access-6g7j4\") pod \"certified-operators-5pkzq\" (UID: \"656e647b-438f-442f-bc26-b92b57b3b76e\") " pod="openshift-marketplace/certified-operators-5pkzq" Jan 26 10:49:11 crc kubenswrapper[5003]: I0126 10:49:11.032990 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/656e647b-438f-442f-bc26-b92b57b3b76e-utilities\") pod \"certified-operators-5pkzq\" (UID: \"656e647b-438f-442f-bc26-b92b57b3b76e\") " pod="openshift-marketplace/certified-operators-5pkzq" Jan 26 10:49:11 crc kubenswrapper[5003]: I0126 10:49:11.033022 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/656e647b-438f-442f-bc26-b92b57b3b76e-catalog-content\") pod \"certified-operators-5pkzq\" (UID: \"656e647b-438f-442f-bc26-b92b57b3b76e\") " pod="openshift-marketplace/certified-operators-5pkzq" Jan 26 10:49:11 crc kubenswrapper[5003]: I0126 10:49:11.033435 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/656e647b-438f-442f-bc26-b92b57b3b76e-catalog-content\") pod \"certified-operators-5pkzq\" (UID: \"656e647b-438f-442f-bc26-b92b57b3b76e\") " pod="openshift-marketplace/certified-operators-5pkzq" Jan 26 10:49:11 crc kubenswrapper[5003]: I0126 10:49:11.033459 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/656e647b-438f-442f-bc26-b92b57b3b76e-utilities\") pod \"certified-operators-5pkzq\" (UID: 
\"656e647b-438f-442f-bc26-b92b57b3b76e\") " pod="openshift-marketplace/certified-operators-5pkzq" Jan 26 10:49:11 crc kubenswrapper[5003]: I0126 10:49:11.048989 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6g7j4\" (UniqueName: \"kubernetes.io/projected/656e647b-438f-442f-bc26-b92b57b3b76e-kube-api-access-6g7j4\") pod \"certified-operators-5pkzq\" (UID: \"656e647b-438f-442f-bc26-b92b57b3b76e\") " pod="openshift-marketplace/certified-operators-5pkzq" Jan 26 10:49:11 crc kubenswrapper[5003]: I0126 10:49:11.137739 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5pkzq" Jan 26 10:49:11 crc kubenswrapper[5003]: I0126 10:49:11.835122 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zccvn"] Jan 26 10:49:11 crc kubenswrapper[5003]: I0126 10:49:11.842835 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5pkzq"] Jan 26 10:49:11 crc kubenswrapper[5003]: W0126 10:49:11.850789 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod656e647b_438f_442f_bc26_b92b57b3b76e.slice/crio-c870f67cd6f4314d6cb0f192fb2d90636fed35a4d04cefac034434fc36f28721 WatchSource:0}: Error finding container c870f67cd6f4314d6cb0f192fb2d90636fed35a4d04cefac034434fc36f28721: Status 404 returned error can't find the container with id c870f67cd6f4314d6cb0f192fb2d90636fed35a4d04cefac034434fc36f28721 Jan 26 10:49:12 crc kubenswrapper[5003]: I0126 10:49:12.671902 5003 generic.go:334] "Generic (PLEG): container finished" podID="656e647b-438f-442f-bc26-b92b57b3b76e" containerID="b55b7c385ebf1df285a58a40ba2a0c7567b8a5a8ff18c492b585f8f23d7a4c75" exitCode=0 Jan 26 10:49:12 crc kubenswrapper[5003]: I0126 10:49:12.671951 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5pkzq" event={"ID":"656e647b-438f-442f-bc26-b92b57b3b76e","Type":"ContainerDied","Data":"b55b7c385ebf1df285a58a40ba2a0c7567b8a5a8ff18c492b585f8f23d7a4c75"} Jan 26 10:49:12 crc kubenswrapper[5003]: I0126 10:49:12.672220 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5pkzq" event={"ID":"656e647b-438f-442f-bc26-b92b57b3b76e","Type":"ContainerStarted","Data":"c870f67cd6f4314d6cb0f192fb2d90636fed35a4d04cefac034434fc36f28721"} Jan 26 10:49:12 crc kubenswrapper[5003]: I0126 10:49:12.674192 5003 generic.go:334] "Generic (PLEG): container finished" podID="dfe4c3b2-3353-4091-bcdc-a63f51c76cfd" containerID="d41ec46a0eb8dca2454ada3d69b62d04d5eb482eb423a3951e9a399a5a046858" exitCode=0 Jan 26 10:49:12 crc kubenswrapper[5003]: I0126 10:49:12.674243 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zccvn" event={"ID":"dfe4c3b2-3353-4091-bcdc-a63f51c76cfd","Type":"ContainerDied","Data":"d41ec46a0eb8dca2454ada3d69b62d04d5eb482eb423a3951e9a399a5a046858"} Jan 26 10:49:12 crc kubenswrapper[5003]: I0126 10:49:12.674314 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zccvn" event={"ID":"dfe4c3b2-3353-4091-bcdc-a63f51c76cfd","Type":"ContainerStarted","Data":"fcbb1ac80c073d76e9b405cbd5f78152c7b74914d4a2e6f809a9548775670e45"} Jan 26 10:49:13 crc kubenswrapper[5003]: I0126 10:49:13.018310 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-lrt87"] Jan 26 
Jan 26 10:49:13 crc kubenswrapper[5003]: I0126 10:49:13.025163 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lrt87"
Jan 26 10:49:13 crc kubenswrapper[5003]: I0126 10:49:13.027993 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Jan 26 10:49:13 crc kubenswrapper[5003]: I0126 10:49:13.044008 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lrt87"]
Jan 26 10:49:13 crc kubenswrapper[5003]: I0126 10:49:13.161056 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bfe9e96c-fa33-4e01-beec-4e038b6ba28c-utilities\") pod \"redhat-marketplace-lrt87\" (UID: \"bfe9e96c-fa33-4e01-beec-4e038b6ba28c\") " pod="openshift-marketplace/redhat-marketplace-lrt87"
Jan 26 10:49:13 crc kubenswrapper[5003]: I0126 10:49:13.161126 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bfe9e96c-fa33-4e01-beec-4e038b6ba28c-catalog-content\") pod \"redhat-marketplace-lrt87\" (UID: \"bfe9e96c-fa33-4e01-beec-4e038b6ba28c\") " pod="openshift-marketplace/redhat-marketplace-lrt87"
Jan 26 10:49:13 crc kubenswrapper[5003]: I0126 10:49:13.161372 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfdjk\" (UniqueName: \"kubernetes.io/projected/bfe9e96c-fa33-4e01-beec-4e038b6ba28c-kube-api-access-mfdjk\") pod \"redhat-marketplace-lrt87\" (UID: \"bfe9e96c-fa33-4e01-beec-4e038b6ba28c\") " pod="openshift-marketplace/redhat-marketplace-lrt87"
Jan 26 10:49:13 crc kubenswrapper[5003]: I0126 10:49:13.214954 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-s9pdl"]
Jan 26 10:49:13 crc kubenswrapper[5003]: I0126 10:49:13.215984 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s9pdl"
Jan 26 10:49:13 crc kubenswrapper[5003]: I0126 10:49:13.217674 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Jan 26 10:49:13 crc kubenswrapper[5003]: I0126 10:49:13.236658 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-s9pdl"]
Jan 26 10:49:13 crc kubenswrapper[5003]: I0126 10:49:13.263015 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92-catalog-content\") pod \"redhat-operators-s9pdl\" (UID: \"e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92\") " pod="openshift-marketplace/redhat-operators-s9pdl"
Jan 26 10:49:13 crc kubenswrapper[5003]: I0126 10:49:13.263162 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bfe9e96c-fa33-4e01-beec-4e038b6ba28c-catalog-content\") pod \"redhat-marketplace-lrt87\" (UID: \"bfe9e96c-fa33-4e01-beec-4e038b6ba28c\") " pod="openshift-marketplace/redhat-marketplace-lrt87"
Jan 26 10:49:13 crc kubenswrapper[5003]: I0126 10:49:13.263197 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92-utilities\") pod \"redhat-operators-s9pdl\" (UID: \"e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92\") " pod="openshift-marketplace/redhat-operators-s9pdl"
Jan 26 10:49:13 crc kubenswrapper[5003]: I0126 10:49:13.263268 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjsrh\" (UniqueName: \"kubernetes.io/projected/e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92-kube-api-access-kjsrh\") pod \"redhat-operators-s9pdl\" (UID: \"e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92\") " pod="openshift-marketplace/redhat-operators-s9pdl"
Jan 26 10:49:13 crc kubenswrapper[5003]: I0126 10:49:13.263435 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfdjk\" (UniqueName: \"kubernetes.io/projected/bfe9e96c-fa33-4e01-beec-4e038b6ba28c-kube-api-access-mfdjk\") pod \"redhat-marketplace-lrt87\" (UID: \"bfe9e96c-fa33-4e01-beec-4e038b6ba28c\") " pod="openshift-marketplace/redhat-marketplace-lrt87"
Jan 26 10:49:13 crc kubenswrapper[5003]: I0126 10:49:13.263497 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bfe9e96c-fa33-4e01-beec-4e038b6ba28c-utilities\") pod \"redhat-marketplace-lrt87\" (UID: \"bfe9e96c-fa33-4e01-beec-4e038b6ba28c\") " pod="openshift-marketplace/redhat-marketplace-lrt87"
Jan 26 10:49:13 crc kubenswrapper[5003]: I0126 10:49:13.263882 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bfe9e96c-fa33-4e01-beec-4e038b6ba28c-catalog-content\") pod \"redhat-marketplace-lrt87\" (UID: \"bfe9e96c-fa33-4e01-beec-4e038b6ba28c\") " pod="openshift-marketplace/redhat-marketplace-lrt87"
Jan 26 10:49:13 crc kubenswrapper[5003]: I0126 10:49:13.265367 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bfe9e96c-fa33-4e01-beec-4e038b6ba28c-utilities\") pod \"redhat-marketplace-lrt87\" (UID: \"bfe9e96c-fa33-4e01-beec-4e038b6ba28c\") " pod="openshift-marketplace/redhat-marketplace-lrt87"
Jan 26 10:49:13 crc kubenswrapper[5003]: I0126 10:49:13.282775 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mfdjk\" (UniqueName: \"kubernetes.io/projected/bfe9e96c-fa33-4e01-beec-4e038b6ba28c-kube-api-access-mfdjk\") pod \"redhat-marketplace-lrt87\" (UID: \"bfe9e96c-fa33-4e01-beec-4e038b6ba28c\") " pod="openshift-marketplace/redhat-marketplace-lrt87"
Jan 26 10:49:13 crc kubenswrapper[5003]: I0126 10:49:13.343469 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lrt87"
Jan 26 10:49:13 crc kubenswrapper[5003]: I0126 10:49:13.364590 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92-catalog-content\") pod \"redhat-operators-s9pdl\" (UID: \"e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92\") " pod="openshift-marketplace/redhat-operators-s9pdl"
Jan 26 10:49:13 crc kubenswrapper[5003]: I0126 10:49:13.364691 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92-utilities\") pod \"redhat-operators-s9pdl\" (UID: \"e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92\") " pod="openshift-marketplace/redhat-operators-s9pdl"
Jan 26 10:49:13 crc kubenswrapper[5003]: I0126 10:49:13.364791 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjsrh\" (UniqueName: \"kubernetes.io/projected/e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92-kube-api-access-kjsrh\") pod \"redhat-operators-s9pdl\" (UID: \"e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92\") " pod="openshift-marketplace/redhat-operators-s9pdl"
Jan 26 10:49:13 crc kubenswrapper[5003]: I0126 10:49:13.365089 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92-catalog-content\") pod \"redhat-operators-s9pdl\" (UID: \"e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92\") " pod="openshift-marketplace/redhat-operators-s9pdl"
Jan 26 10:49:13 crc kubenswrapper[5003]: I0126 10:49:13.365120 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92-utilities\") pod \"redhat-operators-s9pdl\" (UID: \"e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92\") " pod="openshift-marketplace/redhat-operators-s9pdl"
Jan 26 10:49:13 crc kubenswrapper[5003]: I0126 10:49:13.383716 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjsrh\" (UniqueName: \"kubernetes.io/projected/e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92-kube-api-access-kjsrh\") pod \"redhat-operators-s9pdl\" (UID: \"e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92\") " pod="openshift-marketplace/redhat-operators-s9pdl"
Jan 26 10:49:13 crc kubenswrapper[5003]: I0126 10:49:13.538383 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s9pdl"
Jan 26 10:49:13 crc kubenswrapper[5003]: I0126 10:49:13.787101 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lrt87"]
Jan 26 10:49:13 crc kubenswrapper[5003]: W0126 10:49:13.794380 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbfe9e96c_fa33_4e01_beec_4e038b6ba28c.slice/crio-d137e4f91c2f4ac861cc58411446c6acec642fbc179c0014cc174d7624331a10 WatchSource:0}: Error finding container d137e4f91c2f4ac861cc58411446c6acec642fbc179c0014cc174d7624331a10: Status 404 returned error can't find the container with id d137e4f91c2f4ac861cc58411446c6acec642fbc179c0014cc174d7624331a10
Jan 26 10:49:13 crc kubenswrapper[5003]: I0126 10:49:13.945103 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-s9pdl"]
Jan 26 10:49:13 crc kubenswrapper[5003]: W0126 10:49:13.957015 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode8bd36b8_4cf2_435c_8fc9_e3eeef2b4b92.slice/crio-55d66d5ce1108fe64ea4a7a887b9f67db4255c0e5576b42a9ae4011ab5738262 WatchSource:0}: Error finding container 55d66d5ce1108fe64ea4a7a887b9f67db4255c0e5576b42a9ae4011ab5738262: Status 404 returned error can't find the container with id 55d66d5ce1108fe64ea4a7a887b9f67db4255c0e5576b42a9ae4011ab5738262
Jan 26 10:49:14 crc kubenswrapper[5003]: I0126 10:49:14.684921 5003 generic.go:334] "Generic (PLEG): container finished" podID="bfe9e96c-fa33-4e01-beec-4e038b6ba28c" containerID="e61448163b025eb2cb16c71ff0073162716022889de4e860e846fd8e6a8f792a" exitCode=0
Jan 26 10:49:14 crc kubenswrapper[5003]: I0126 10:49:14.684987 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lrt87" event={"ID":"bfe9e96c-fa33-4e01-beec-4e038b6ba28c","Type":"ContainerDied","Data":"e61448163b025eb2cb16c71ff0073162716022889de4e860e846fd8e6a8f792a"}
Jan 26 10:49:14 crc kubenswrapper[5003]: I0126 10:49:14.685011 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lrt87" event={"ID":"bfe9e96c-fa33-4e01-beec-4e038b6ba28c","Type":"ContainerStarted","Data":"d137e4f91c2f4ac861cc58411446c6acec642fbc179c0014cc174d7624331a10"}
Jan 26 10:49:14 crc kubenswrapper[5003]: I0126 10:49:14.686258 5003 generic.go:334] "Generic (PLEG): container finished" podID="e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92" containerID="9ba333481dc8fbfbc51cbc0fb6bf70e3cbd6f48cd5341821c2d1abe5a65a1e8c" exitCode=0
Jan 26 10:49:14 crc kubenswrapper[5003]: I0126 10:49:14.686340 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s9pdl" event={"ID":"e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92","Type":"ContainerDied","Data":"9ba333481dc8fbfbc51cbc0fb6bf70e3cbd6f48cd5341821c2d1abe5a65a1e8c"}
Jan 26 10:49:14 crc kubenswrapper[5003]: I0126 10:49:14.686366 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s9pdl" event={"ID":"e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92","Type":"ContainerStarted","Data":"55d66d5ce1108fe64ea4a7a887b9f67db4255c0e5576b42a9ae4011ab5738262"}
Jan 26 10:49:14 crc kubenswrapper[5003]: I0126 10:49:14.688433 5003 generic.go:334] "Generic (PLEG): container finished" podID="656e647b-438f-442f-bc26-b92b57b3b76e" containerID="a946512d3bca2fe67a2d74d732fc3011db9cc99499cb174b75f32c948ed2afc2" exitCode=0
Jan 26 10:49:14 crc kubenswrapper[5003]: I0126 10:49:14.688512 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5pkzq" event={"ID":"656e647b-438f-442f-bc26-b92b57b3b76e","Type":"ContainerDied","Data":"a946512d3bca2fe67a2d74d732fc3011db9cc99499cb174b75f32c948ed2afc2"}
Jan 26 10:49:14 crc kubenswrapper[5003]: I0126 10:49:14.691134 5003 generic.go:334] "Generic (PLEG): container finished" podID="dfe4c3b2-3353-4091-bcdc-a63f51c76cfd" containerID="ecf1d84fdf6bce4b74db76fc407ac404661f011632c139443d47b5774a5576d5" exitCode=0
Jan 26 10:49:14 crc kubenswrapper[5003]: I0126 10:49:14.691154 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zccvn" event={"ID":"dfe4c3b2-3353-4091-bcdc-a63f51c76cfd","Type":"ContainerDied","Data":"ecf1d84fdf6bce4b74db76fc407ac404661f011632c139443d47b5774a5576d5"}
Jan 26 10:49:15 crc kubenswrapper[5003]: I0126 10:49:15.390543 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-774d86fbb9-9p9qf"]
Jan 26 10:49:15 crc kubenswrapper[5003]: I0126 10:49:15.391470 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-774d86fbb9-9p9qf" podUID="e9175efa-12f5-44f7-ab78-0a0ead34d311" containerName="route-controller-manager" containerID="cri-o://6f521d1f4ad6ecf9f4d574c2b1a3b0c00440c68c6eb38c8535d16e2fb5e21aaf" gracePeriod=30
Jan 26 10:49:15 crc kubenswrapper[5003]: I0126 10:49:15.697739 5003 generic.go:334] "Generic (PLEG): container finished" podID="bfe9e96c-fa33-4e01-beec-4e038b6ba28c" containerID="d5fcbe9582304d4a926b707abcbb047e3c1f0e3a330e8555d5bfcb3db9607da2" exitCode=0
Jan 26 10:49:15 crc kubenswrapper[5003]: I0126 10:49:15.697843 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lrt87" event={"ID":"bfe9e96c-fa33-4e01-beec-4e038b6ba28c","Type":"ContainerDied","Data":"d5fcbe9582304d4a926b707abcbb047e3c1f0e3a330e8555d5bfcb3db9607da2"}
Jan 26 10:49:15 crc kubenswrapper[5003]: I0126 10:49:15.699985 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s9pdl" event={"ID":"e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92","Type":"ContainerStarted","Data":"0e7583eca366281d6860f594252d35fea8aed4379a11a3edc9efc84ee37e2315"}
Jan 26 10:49:15 crc kubenswrapper[5003]: I0126 10:49:15.703243 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5pkzq" event={"ID":"656e647b-438f-442f-bc26-b92b57b3b76e","Type":"ContainerStarted","Data":"47d41f0eb68a13937282e5ac2068864b68dbe8ab8126357e59880e66b5dbf67b"}
Jan 26 10:49:15 crc kubenswrapper[5003]: I0126 10:49:15.704924 5003 generic.go:334] "Generic (PLEG): container finished" podID="e9175efa-12f5-44f7-ab78-0a0ead34d311" containerID="6f521d1f4ad6ecf9f4d574c2b1a3b0c00440c68c6eb38c8535d16e2fb5e21aaf" exitCode=0
Jan 26 10:49:15 crc kubenswrapper[5003]: I0126 10:49:15.704998 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-774d86fbb9-9p9qf" event={"ID":"e9175efa-12f5-44f7-ab78-0a0ead34d311","Type":"ContainerDied","Data":"6f521d1f4ad6ecf9f4d574c2b1a3b0c00440c68c6eb38c8535d16e2fb5e21aaf"}
Jan 26 10:49:15 crc kubenswrapper[5003]: I0126 10:49:15.708268 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zccvn" event={"ID":"dfe4c3b2-3353-4091-bcdc-a63f51c76cfd","Type":"ContainerStarted","Data":"765b83a9fdec456e3f2322711efb642bacfe11dedc9a34332cff3a401afdb5cc"}
Jan 26 10:49:15 crc kubenswrapper[5003]: I0126 10:49:15.733247 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5pkzq" podStartSLOduration=3.307435572 podStartE2EDuration="5.733228484s" podCreationTimestamp="2026-01-26 10:49:10 +0000 UTC" firstStartedPulling="2026-01-26 10:49:12.673555014 +0000 UTC m=+368.214780575" lastFinishedPulling="2026-01-26 10:49:15.099347926 +0000 UTC m=+370.640573487" observedRunningTime="2026-01-26 10:49:15.729425125 +0000 UTC m=+371.270650706" watchObservedRunningTime="2026-01-26 10:49:15.733228484 +0000 UTC m=+371.274454035"
Jan 26 10:49:15 crc kubenswrapper[5003]: I0126 10:49:15.753651 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zccvn" podStartSLOduration=3.298550676 podStartE2EDuration="5.753630737s" podCreationTimestamp="2026-01-26 10:49:10 +0000 UTC" firstStartedPulling="2026-01-26 10:49:12.675870316 +0000 UTC m=+368.217095887" lastFinishedPulling="2026-01-26 10:49:15.130950367 +0000 UTC m=+370.672175948" observedRunningTime="2026-01-26 10:49:15.751676756 +0000 UTC m=+371.292902317" watchObservedRunningTime="2026-01-26 10:49:15.753630737 +0000 UTC m=+371.294856308"
Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.392998 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-774d86fbb9-9p9qf"
Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.427791 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-658d7bcc8c-9pmp8"]
Jan 26 10:49:16 crc kubenswrapper[5003]: E0126 10:49:16.428012 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9175efa-12f5-44f7-ab78-0a0ead34d311" containerName="route-controller-manager"
Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.428031 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9175efa-12f5-44f7-ab78-0a0ead34d311" containerName="route-controller-manager"
Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.428170 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9175efa-12f5-44f7-ab78-0a0ead34d311" containerName="route-controller-manager"
Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.428612 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-658d7bcc8c-9pmp8"
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-658d7bcc8c-9pmp8" Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.437535 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-658d7bcc8c-9pmp8"] Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.517555 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cngvc\" (UniqueName: \"kubernetes.io/projected/e9175efa-12f5-44f7-ab78-0a0ead34d311-kube-api-access-cngvc\") pod \"e9175efa-12f5-44f7-ab78-0a0ead34d311\" (UID: \"e9175efa-12f5-44f7-ab78-0a0ead34d311\") " Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.517611 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e9175efa-12f5-44f7-ab78-0a0ead34d311-client-ca\") pod \"e9175efa-12f5-44f7-ab78-0a0ead34d311\" (UID: \"e9175efa-12f5-44f7-ab78-0a0ead34d311\") " Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.517630 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9175efa-12f5-44f7-ab78-0a0ead34d311-config\") pod \"e9175efa-12f5-44f7-ab78-0a0ead34d311\" (UID: \"e9175efa-12f5-44f7-ab78-0a0ead34d311\") " Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.517712 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e9175efa-12f5-44f7-ab78-0a0ead34d311-serving-cert\") pod \"e9175efa-12f5-44f7-ab78-0a0ead34d311\" (UID: \"e9175efa-12f5-44f7-ab78-0a0ead34d311\") " Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.517897 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82ca2207-a635-48ef-877e-f1be38ea53ed-config\") pod \"route-controller-manager-658d7bcc8c-9pmp8\" (UID: \"82ca2207-a635-48ef-877e-f1be38ea53ed\") " pod="openshift-route-controller-manager/route-controller-manager-658d7bcc8c-9pmp8" Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.517919 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/82ca2207-a635-48ef-877e-f1be38ea53ed-client-ca\") pod \"route-controller-manager-658d7bcc8c-9pmp8\" (UID: \"82ca2207-a635-48ef-877e-f1be38ea53ed\") " pod="openshift-route-controller-manager/route-controller-manager-658d7bcc8c-9pmp8" Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.518048 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45h9d\" (UniqueName: \"kubernetes.io/projected/82ca2207-a635-48ef-877e-f1be38ea53ed-kube-api-access-45h9d\") pod \"route-controller-manager-658d7bcc8c-9pmp8\" (UID: \"82ca2207-a635-48ef-877e-f1be38ea53ed\") " pod="openshift-route-controller-manager/route-controller-manager-658d7bcc8c-9pmp8" Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.518201 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82ca2207-a635-48ef-877e-f1be38ea53ed-serving-cert\") pod \"route-controller-manager-658d7bcc8c-9pmp8\" (UID: \"82ca2207-a635-48ef-877e-f1be38ea53ed\") " pod="openshift-route-controller-manager/route-controller-manager-658d7bcc8c-9pmp8" Jan 26 10:49:16 crc 
kubenswrapper[5003]: I0126 10:49:16.518345 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9175efa-12f5-44f7-ab78-0a0ead34d311-client-ca" (OuterVolumeSpecName: "client-ca") pod "e9175efa-12f5-44f7-ab78-0a0ead34d311" (UID: "e9175efa-12f5-44f7-ab78-0a0ead34d311"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.518353 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9175efa-12f5-44f7-ab78-0a0ead34d311-config" (OuterVolumeSpecName: "config") pod "e9175efa-12f5-44f7-ab78-0a0ead34d311" (UID: "e9175efa-12f5-44f7-ab78-0a0ead34d311"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.526506 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9175efa-12f5-44f7-ab78-0a0ead34d311-kube-api-access-cngvc" (OuterVolumeSpecName: "kube-api-access-cngvc") pod "e9175efa-12f5-44f7-ab78-0a0ead34d311" (UID: "e9175efa-12f5-44f7-ab78-0a0ead34d311"). InnerVolumeSpecName "kube-api-access-cngvc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.536048 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9175efa-12f5-44f7-ab78-0a0ead34d311-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e9175efa-12f5-44f7-ab78-0a0ead34d311" (UID: "e9175efa-12f5-44f7-ab78-0a0ead34d311"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.619603 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82ca2207-a635-48ef-877e-f1be38ea53ed-config\") pod \"route-controller-manager-658d7bcc8c-9pmp8\" (UID: \"82ca2207-a635-48ef-877e-f1be38ea53ed\") " pod="openshift-route-controller-manager/route-controller-manager-658d7bcc8c-9pmp8" Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.619646 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/82ca2207-a635-48ef-877e-f1be38ea53ed-client-ca\") pod \"route-controller-manager-658d7bcc8c-9pmp8\" (UID: \"82ca2207-a635-48ef-877e-f1be38ea53ed\") " pod="openshift-route-controller-manager/route-controller-manager-658d7bcc8c-9pmp8" Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.619677 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45h9d\" (UniqueName: \"kubernetes.io/projected/82ca2207-a635-48ef-877e-f1be38ea53ed-kube-api-access-45h9d\") pod \"route-controller-manager-658d7bcc8c-9pmp8\" (UID: \"82ca2207-a635-48ef-877e-f1be38ea53ed\") " pod="openshift-route-controller-manager/route-controller-manager-658d7bcc8c-9pmp8" Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.619709 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82ca2207-a635-48ef-877e-f1be38ea53ed-serving-cert\") pod \"route-controller-manager-658d7bcc8c-9pmp8\" (UID: \"82ca2207-a635-48ef-877e-f1be38ea53ed\") " pod="openshift-route-controller-manager/route-controller-manager-658d7bcc8c-9pmp8" Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.619756 5003 reconciler_common.go:293] 
"Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e9175efa-12f5-44f7-ab78-0a0ead34d311-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.619768 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cngvc\" (UniqueName: \"kubernetes.io/projected/e9175efa-12f5-44f7-ab78-0a0ead34d311-kube-api-access-cngvc\") on node \"crc\" DevicePath \"\"" Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.619779 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9175efa-12f5-44f7-ab78-0a0ead34d311-config\") on node \"crc\" DevicePath \"\"" Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.619787 5003 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e9175efa-12f5-44f7-ab78-0a0ead34d311-client-ca\") on node \"crc\" DevicePath \"\"" Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.620672 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/82ca2207-a635-48ef-877e-f1be38ea53ed-client-ca\") pod \"route-controller-manager-658d7bcc8c-9pmp8\" (UID: \"82ca2207-a635-48ef-877e-f1be38ea53ed\") " pod="openshift-route-controller-manager/route-controller-manager-658d7bcc8c-9pmp8" Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.620933 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82ca2207-a635-48ef-877e-f1be38ea53ed-config\") pod \"route-controller-manager-658d7bcc8c-9pmp8\" (UID: \"82ca2207-a635-48ef-877e-f1be38ea53ed\") " pod="openshift-route-controller-manager/route-controller-manager-658d7bcc8c-9pmp8" Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.624113 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82ca2207-a635-48ef-877e-f1be38ea53ed-serving-cert\") pod \"route-controller-manager-658d7bcc8c-9pmp8\" (UID: \"82ca2207-a635-48ef-877e-f1be38ea53ed\") " pod="openshift-route-controller-manager/route-controller-manager-658d7bcc8c-9pmp8" Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.635942 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45h9d\" (UniqueName: \"kubernetes.io/projected/82ca2207-a635-48ef-877e-f1be38ea53ed-kube-api-access-45h9d\") pod \"route-controller-manager-658d7bcc8c-9pmp8\" (UID: \"82ca2207-a635-48ef-877e-f1be38ea53ed\") " pod="openshift-route-controller-manager/route-controller-manager-658d7bcc8c-9pmp8" Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.720022 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-774d86fbb9-9p9qf" event={"ID":"e9175efa-12f5-44f7-ab78-0a0ead34d311","Type":"ContainerDied","Data":"eab2808a684860b1bcce0c49504aff044a5285a0914d5b3e47e2f2d2baf09c94"} Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.720071 5003 scope.go:117] "RemoveContainer" containerID="6f521d1f4ad6ecf9f4d574c2b1a3b0c00440c68c6eb38c8535d16e2fb5e21aaf" Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.720171 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-774d86fbb9-9p9qf" Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.727265 5003 generic.go:334] "Generic (PLEG): container finished" podID="e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92" containerID="0e7583eca366281d6860f594252d35fea8aed4379a11a3edc9efc84ee37e2315" exitCode=0 Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.727406 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s9pdl" event={"ID":"e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92","Type":"ContainerDied","Data":"0e7583eca366281d6860f594252d35fea8aed4379a11a3edc9efc84ee37e2315"} Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.748679 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-658d7bcc8c-9pmp8" Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.774917 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-774d86fbb9-9p9qf"] Jan 26 10:49:16 crc kubenswrapper[5003]: I0126 10:49:16.780697 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-774d86fbb9-9p9qf"] Jan 26 10:49:17 crc kubenswrapper[5003]: I0126 10:49:17.010998 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9175efa-12f5-44f7-ab78-0a0ead34d311" path="/var/lib/kubelet/pods/e9175efa-12f5-44f7-ab78-0a0ead34d311/volumes" Jan 26 10:49:17 crc kubenswrapper[5003]: I0126 10:49:17.288233 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-658d7bcc8c-9pmp8"] Jan 26 10:49:17 crc kubenswrapper[5003]: W0126 10:49:17.305167 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod82ca2207_a635_48ef_877e_f1be38ea53ed.slice/crio-4d82d39dc3d0df3613c76db7472ce2b8872627349b7b96d22d857330a9b8f5de WatchSource:0}: Error finding container 4d82d39dc3d0df3613c76db7472ce2b8872627349b7b96d22d857330a9b8f5de: Status 404 returned error can't find the container with id 4d82d39dc3d0df3613c76db7472ce2b8872627349b7b96d22d857330a9b8f5de Jan 26 10:49:17 crc kubenswrapper[5003]: I0126 10:49:17.745320 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-658d7bcc8c-9pmp8" event={"ID":"82ca2207-a635-48ef-877e-f1be38ea53ed","Type":"ContainerStarted","Data":"ea043d5167e177b60c4f9f441b7f4155e76fd29800125e88a25c582ddd7f1fc5"} Jan 26 10:49:17 crc kubenswrapper[5003]: I0126 10:49:17.745621 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-658d7bcc8c-9pmp8" event={"ID":"82ca2207-a635-48ef-877e-f1be38ea53ed","Type":"ContainerStarted","Data":"4d82d39dc3d0df3613c76db7472ce2b8872627349b7b96d22d857330a9b8f5de"} Jan 26 10:49:17 crc kubenswrapper[5003]: I0126 10:49:17.745640 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-658d7bcc8c-9pmp8" Jan 26 10:49:17 crc kubenswrapper[5003]: I0126 10:49:17.750734 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lrt87" event={"ID":"bfe9e96c-fa33-4e01-beec-4e038b6ba28c","Type":"ContainerStarted","Data":"d960b1c561fd431e87cb818694078921a26d022d6d78e7086d90932fec727b9f"} Jan 26 
10:49:17 crc kubenswrapper[5003]: I0126 10:49:17.753322 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s9pdl" event={"ID":"e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92","Type":"ContainerStarted","Data":"48fe5352483ff8ca1fd086972e52af415f09722cc18378e409458f59bd426fad"} Jan 26 10:49:17 crc kubenswrapper[5003]: I0126 10:49:17.767045 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-658d7bcc8c-9pmp8" podStartSLOduration=2.767026098 podStartE2EDuration="2.767026098s" podCreationTimestamp="2026-01-26 10:49:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:49:17.764960114 +0000 UTC m=+373.306185695" watchObservedRunningTime="2026-01-26 10:49:17.767026098 +0000 UTC m=+373.308251659" Jan 26 10:49:17 crc kubenswrapper[5003]: I0126 10:49:17.788437 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-lrt87" podStartSLOduration=2.451068105 podStartE2EDuration="4.788411742s" podCreationTimestamp="2026-01-26 10:49:13 +0000 UTC" firstStartedPulling="2026-01-26 10:49:14.686759639 +0000 UTC m=+370.227985220" lastFinishedPulling="2026-01-26 10:49:17.024103296 +0000 UTC m=+372.565328857" observedRunningTime="2026-01-26 10:49:17.786515723 +0000 UTC m=+373.327741284" watchObservedRunningTime="2026-01-26 10:49:17.788411742 +0000 UTC m=+373.329637303" Jan 26 10:49:17 crc kubenswrapper[5003]: I0126 10:49:17.810678 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-s9pdl" podStartSLOduration=2.284616289 podStartE2EDuration="4.810653803s" podCreationTimestamp="2026-01-26 10:49:13 +0000 UTC" firstStartedPulling="2026-01-26 10:49:14.68774307 +0000 UTC m=+370.228968631" lastFinishedPulling="2026-01-26 10:49:17.213780584 +0000 UTC m=+372.755006145" observedRunningTime="2026-01-26 10:49:17.805880065 +0000 UTC m=+373.347105626" watchObservedRunningTime="2026-01-26 10:49:17.810653803 +0000 UTC m=+373.351879364" Jan 26 10:49:18 crc kubenswrapper[5003]: I0126 10:49:18.022135 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-658d7bcc8c-9pmp8" Jan 26 10:49:20 crc kubenswrapper[5003]: I0126 10:49:20.976275 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zccvn" Jan 26 10:49:20 crc kubenswrapper[5003]: I0126 10:49:20.976860 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zccvn" Jan 26 10:49:21 crc kubenswrapper[5003]: I0126 10:49:21.025514 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zccvn" Jan 26 10:49:21 crc kubenswrapper[5003]: I0126 10:49:21.138450 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5pkzq" Jan 26 10:49:21 crc kubenswrapper[5003]: I0126 10:49:21.138507 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5pkzq" Jan 26 10:49:21 crc kubenswrapper[5003]: I0126 10:49:21.186373 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5pkzq" Jan 26 10:49:21 
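
The pod_startup_latency_tracker lines above report two numbers per pod: podStartE2EDuration, the wall-clock gap from podCreationTimestamp to the observed running time, and podStartSLOduration, which additionally excludes the image-pull window (lastFinishedPulling minus firstStartedPulling). The certified-operators-5pkzq tracker line earlier in this section checks out exactly; a quick stdlib verification, with timestamps copied from that line and nanoseconds truncated to microseconds:

```python
from datetime import datetime, timezone

def parse_ts(s: str) -> datetime:
    """Parse '2026-01-26 10:49:15.733228484 +0000 UTC' (truncate ns -> us)."""
    date, clock = s.split(" ")[:2]
    hms, _, frac = clock.partition(".")
    return datetime.strptime(f"{date} {hms}.{(frac or '0')[:6]:0<6}",
                             "%Y-%m-%d %H:%M:%S.%f").replace(tzinfo=timezone.utc)

created    = parse_ts("2026-01-26 10:49:10 +0000 UTC")            # podCreationTimestamp
first_pull = parse_ts("2026-01-26 10:49:12.673555014 +0000 UTC")  # firstStartedPulling
last_pull  = parse_ts("2026-01-26 10:49:15.099347926 +0000 UTC")  # lastFinishedPulling
running    = parse_ts("2026-01-26 10:49:15.733228484 +0000 UTC")  # watchObservedRunningTime

e2e = (running - created).total_seconds()             # 5.733228 (podStartE2EDuration)
slo = e2e - (last_pull - first_pull).total_seconds()  # 3.307436 (podStartSLOduration)
print(f"e2e={e2e:.6f}s slo={slo:.6f}s")
```
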
crc kubenswrapper[5003]: I0126 10:49:21.808974 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zccvn" Jan 26 10:49:21 crc kubenswrapper[5003]: I0126 10:49:21.812354 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5pkzq" Jan 26 10:49:23 crc kubenswrapper[5003]: I0126 10:49:23.344014 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-lrt87" Jan 26 10:49:23 crc kubenswrapper[5003]: I0126 10:49:23.344398 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-lrt87" Jan 26 10:49:23 crc kubenswrapper[5003]: I0126 10:49:23.384302 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-lrt87" Jan 26 10:49:23 crc kubenswrapper[5003]: I0126 10:49:23.539670 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-s9pdl" Jan 26 10:49:23 crc kubenswrapper[5003]: I0126 10:49:23.539734 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-s9pdl" Jan 26 10:49:23 crc kubenswrapper[5003]: I0126 10:49:23.582078 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-s9pdl" Jan 26 10:49:23 crc kubenswrapper[5003]: I0126 10:49:23.833818 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-s9pdl" Jan 26 10:49:23 crc kubenswrapper[5003]: I0126 10:49:23.834845 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-lrt87" Jan 26 10:49:29 crc kubenswrapper[5003]: I0126 10:49:29.814716 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-68rxt" Jan 26 10:49:29 crc kubenswrapper[5003]: I0126 10:49:29.888780 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2kvjq"] Jan 26 10:49:39 crc kubenswrapper[5003]: I0126 10:49:39.040589 5003 patch_prober.go:28] interesting pod/machine-config-daemon-m84kp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 10:49:39 crc kubenswrapper[5003]: I0126 10:49:39.041137 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 10:49:54 crc kubenswrapper[5003]: I0126 10:49:54.930553 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" podUID="a56181a5-de19-48f8-8a39-73d3ea6c9d1e" containerName="registry" containerID="cri-o://f7cc26ecc643673f6e733f2a7e36462d73f69e19590e44f6a6d41bb0e4ecbb41" gracePeriod=30 Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.318829 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.442887 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-bound-sa-token\") pod \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.442951 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-registry-certificates\") pod \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.442995 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-trusted-ca\") pod \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.443179 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.443241 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-ca-trust-extracted\") pod \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.443342 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f2djc\" (UniqueName: \"kubernetes.io/projected/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-kube-api-access-f2djc\") pod \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.443390 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-registry-tls\") pod \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.443428 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-installation-pull-secrets\") pod \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\" (UID: \"a56181a5-de19-48f8-8a39-73d3ea6c9d1e\") " Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.444007 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a56181a5-de19-48f8-8a39-73d3ea6c9d1e" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.444833 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "a56181a5-de19-48f8-8a39-73d3ea6c9d1e" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.450160 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "a56181a5-de19-48f8-8a39-73d3ea6c9d1e" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.450199 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "a56181a5-de19-48f8-8a39-73d3ea6c9d1e" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.450565 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a56181a5-de19-48f8-8a39-73d3ea6c9d1e" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.451007 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-kube-api-access-f2djc" (OuterVolumeSpecName: "kube-api-access-f2djc") pod "a56181a5-de19-48f8-8a39-73d3ea6c9d1e" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e"). InnerVolumeSpecName "kube-api-access-f2djc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.453987 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "a56181a5-de19-48f8-8a39-73d3ea6c9d1e" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.459965 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "a56181a5-de19-48f8-8a39-73d3ea6c9d1e" (UID: "a56181a5-de19-48f8-8a39-73d3ea6c9d1e"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.544953 5003 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.544995 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f2djc\" (UniqueName: \"kubernetes.io/projected/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-kube-api-access-f2djc\") on node \"crc\" DevicePath \"\"" Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.545009 5003 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.545018 5003 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.545026 5003 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.545035 5003 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.545042 5003 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a56181a5-de19-48f8-8a39-73d3ea6c9d1e-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.955962 5003 generic.go:334] "Generic (PLEG): container finished" podID="a56181a5-de19-48f8-8a39-73d3ea6c9d1e" containerID="f7cc26ecc643673f6e733f2a7e36462d73f69e19590e44f6a6d41bb0e4ecbb41" exitCode=0 Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.955994 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.956012 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" event={"ID":"a56181a5-de19-48f8-8a39-73d3ea6c9d1e","Type":"ContainerDied","Data":"f7cc26ecc643673f6e733f2a7e36462d73f69e19590e44f6a6d41bb0e4ecbb41"} Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.956042 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-2kvjq" event={"ID":"a56181a5-de19-48f8-8a39-73d3ea6c9d1e","Type":"ContainerDied","Data":"15962a74288ae2ba4fda966b5fb83af41ec5b592de07af7a667d08caf5abcaba"} Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.956061 5003 scope.go:117] "RemoveContainer" containerID="f7cc26ecc643673f6e733f2a7e36462d73f69e19590e44f6a6d41bb0e4ecbb41" Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.970702 5003 scope.go:117] "RemoveContainer" containerID="f7cc26ecc643673f6e733f2a7e36462d73f69e19590e44f6a6d41bb0e4ecbb41" Jan 26 10:49:55 crc kubenswrapper[5003]: E0126 10:49:55.971148 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7cc26ecc643673f6e733f2a7e36462d73f69e19590e44f6a6d41bb0e4ecbb41\": container with ID starting with f7cc26ecc643673f6e733f2a7e36462d73f69e19590e44f6a6d41bb0e4ecbb41 not found: ID does not exist" containerID="f7cc26ecc643673f6e733f2a7e36462d73f69e19590e44f6a6d41bb0e4ecbb41" Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.971185 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7cc26ecc643673f6e733f2a7e36462d73f69e19590e44f6a6d41bb0e4ecbb41"} err="failed to get container status \"f7cc26ecc643673f6e733f2a7e36462d73f69e19590e44f6a6d41bb0e4ecbb41\": rpc error: code = NotFound desc = could not find container \"f7cc26ecc643673f6e733f2a7e36462d73f69e19590e44f6a6d41bb0e4ecbb41\": container with ID starting with f7cc26ecc643673f6e733f2a7e36462d73f69e19590e44f6a6d41bb0e4ecbb41 not found: ID does not exist" Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.981820 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2kvjq"] Jan 26 10:49:55 crc kubenswrapper[5003]: I0126 10:49:55.988952 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2kvjq"] Jan 26 10:49:57 crc kubenswrapper[5003]: I0126 10:49:57.009004 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a56181a5-de19-48f8-8a39-73d3ea6c9d1e" path="/var/lib/kubelet/pods/a56181a5-de19-48f8-8a39-73d3ea6c9d1e/volumes" Jan 26 10:50:09 crc kubenswrapper[5003]: I0126 10:50:09.039809 5003 patch_prober.go:28] interesting pod/machine-config-daemon-m84kp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 10:50:09 crc kubenswrapper[5003]: I0126 10:50:09.040333 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 10:50:09 crc 
kubenswrapper[5003]: I0126 10:50:09.040399 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" Jan 26 10:50:09 crc kubenswrapper[5003]: I0126 10:50:09.041103 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"dd6b3a58165b8c76447b588cc3ce270db8803864644876e30d9eecfbf65acf09"} pod="openshift-machine-config-operator/machine-config-daemon-m84kp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 26 10:50:09 crc kubenswrapper[5003]: I0126 10:50:09.041174 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" containerID="cri-o://dd6b3a58165b8c76447b588cc3ce270db8803864644876e30d9eecfbf65acf09" gracePeriod=600 Jan 26 10:50:10 crc kubenswrapper[5003]: I0126 10:50:10.033118 5003 generic.go:334] "Generic (PLEG): container finished" podID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerID="dd6b3a58165b8c76447b588cc3ce270db8803864644876e30d9eecfbf65acf09" exitCode=0 Jan 26 10:50:10 crc kubenswrapper[5003]: I0126 10:50:10.033197 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" event={"ID":"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd","Type":"ContainerDied","Data":"dd6b3a58165b8c76447b588cc3ce270db8803864644876e30d9eecfbf65acf09"} Jan 26 10:50:10 crc kubenswrapper[5003]: I0126 10:50:10.033461 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" event={"ID":"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd","Type":"ContainerStarted","Data":"4c0aa82b43e2be72d561c0781bb825479f5b77be8b957270d8337126584ed98e"} Jan 26 10:50:10 crc kubenswrapper[5003]: I0126 10:50:10.033485 5003 scope.go:117] "RemoveContainer" containerID="847f16e05a0f7e52160dccc449727e22e1670cda7e86dd4224066fc61027619a" Jan 26 10:52:09 crc kubenswrapper[5003]: I0126 10:52:09.039918 5003 patch_prober.go:28] interesting pod/machine-config-daemon-m84kp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 10:52:09 crc kubenswrapper[5003]: I0126 10:52:09.040733 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 10:52:39 crc kubenswrapper[5003]: I0126 10:52:39.040838 5003 patch_prober.go:28] interesting pod/machine-config-daemon-m84kp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 10:52:39 crc kubenswrapper[5003]: I0126 10:52:39.041413 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.040928 5003 patch_prober.go:28] interesting pod/machine-config-daemon-m84kp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.041730 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.041790 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.042496 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4c0aa82b43e2be72d561c0781bb825479f5b77be8b957270d8337126584ed98e"} pod="openshift-machine-config-operator/machine-config-daemon-m84kp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.042566 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" containerID="cri-o://4c0aa82b43e2be72d561c0781bb825479f5b77be8b957270d8337126584ed98e" gracePeriod=600 Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.524242 5003 generic.go:334] "Generic (PLEG): container finished" podID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerID="4c0aa82b43e2be72d561c0781bb825479f5b77be8b957270d8337126584ed98e" exitCode=0 Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.524317 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" event={"ID":"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd","Type":"ContainerDied","Data":"4c0aa82b43e2be72d561c0781bb825479f5b77be8b957270d8337126584ed98e"} Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.524705 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" event={"ID":"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd","Type":"ContainerStarted","Data":"4dd55168d07d12b4dda1e126f43b86ddabeac34b8ea63b9c2a281cb6276edb9b"} Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.524725 5003 scope.go:117] "RemoveContainer" containerID="dd6b3a58165b8c76447b588cc3ce270db8803864644876e30d9eecfbf65acf09" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.591855 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-q24zl"] Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.592467 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="ovn-controller" containerID="cri-o://33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f" gracePeriod=30 Jan 26 10:53:09 crc kubenswrapper[5003]: 
I0126 10:53:09.592560 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="nbdb" containerID="cri-o://c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19" gracePeriod=30 Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.592601 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="northd" containerID="cri-o://7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34" gracePeriod=30 Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.592707 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2" gracePeriod=30 Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.592640 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="sbdb" containerID="cri-o://1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec" gracePeriod=30 Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.592708 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="ovn-acl-logging" containerID="cri-o://03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451" gracePeriod=30 Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.592654 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="kube-rbac-proxy-node" containerID="cri-o://b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68" gracePeriod=30 Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.636154 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="ovnkube-controller" containerID="cri-o://6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489" gracePeriod=30 Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.848144 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q24zl_f9a98683-f9ac-45d4-9312-43ebf25bdb52/ovnkube-controller/3.log" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.850244 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q24zl_f9a98683-f9ac-45d4-9312-43ebf25bdb52/ovn-acl-logging/0.log" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.850774 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q24zl_f9a98683-f9ac-45d4-9312-43ebf25bdb52/ovn-controller/0.log" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.851215 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.906798 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-f65b8"] Jan 26 10:53:09 crc kubenswrapper[5003]: E0126 10:53:09.907102 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="sbdb" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.907127 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="sbdb" Jan 26 10:53:09 crc kubenswrapper[5003]: E0126 10:53:09.907145 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="ovnkube-controller" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.907160 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="ovnkube-controller" Jan 26 10:53:09 crc kubenswrapper[5003]: E0126 10:53:09.907178 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="ovnkube-controller" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.907191 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="ovnkube-controller" Jan 26 10:53:09 crc kubenswrapper[5003]: E0126 10:53:09.907205 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="ovn-controller" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.907217 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="ovn-controller" Jan 26 10:53:09 crc kubenswrapper[5003]: E0126 10:53:09.907236 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="nbdb" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.907248 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="nbdb" Jan 26 10:53:09 crc kubenswrapper[5003]: E0126 10:53:09.907272 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a56181a5-de19-48f8-8a39-73d3ea6c9d1e" containerName="registry" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.907283 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="a56181a5-de19-48f8-8a39-73d3ea6c9d1e" containerName="registry" Jan 26 10:53:09 crc kubenswrapper[5003]: E0126 10:53:09.907328 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="ovn-acl-logging" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.907340 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="ovn-acl-logging" Jan 26 10:53:09 crc kubenswrapper[5003]: E0126 10:53:09.907355 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="kube-rbac-proxy-ovn-metrics" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.907368 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="kube-rbac-proxy-ovn-metrics" Jan 26 10:53:09 crc kubenswrapper[5003]: E0126 10:53:09.907384 5003 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="kube-rbac-proxy-node" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.907397 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="kube-rbac-proxy-node" Jan 26 10:53:09 crc kubenswrapper[5003]: E0126 10:53:09.907409 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="kubecfg-setup" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.907421 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="kubecfg-setup" Jan 26 10:53:09 crc kubenswrapper[5003]: E0126 10:53:09.907440 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="northd" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.907451 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="northd" Jan 26 10:53:09 crc kubenswrapper[5003]: E0126 10:53:09.907465 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="ovnkube-controller" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.907476 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="ovnkube-controller" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.907624 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="sbdb" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.907641 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="ovnkube-controller" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.907656 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="ovnkube-controller" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.907673 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="kube-rbac-proxy-ovn-metrics" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.907691 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="nbdb" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.907705 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="ovn-controller" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.907720 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="ovn-acl-logging" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.907738 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="kube-rbac-proxy-node" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.907753 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="ovnkube-controller" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.907768 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="a56181a5-de19-48f8-8a39-73d3ea6c9d1e" containerName="registry" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.907780 5003 
memory_manager.go:354] "RemoveStaleState removing state" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="northd" Jan 26 10:53:09 crc kubenswrapper[5003]: E0126 10:53:09.907928 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="ovnkube-controller" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.907941 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="ovnkube-controller" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.908098 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="ovnkube-controller" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.908117 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="ovnkube-controller" Jan 26 10:53:09 crc kubenswrapper[5003]: E0126 10:53:09.908296 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="ovnkube-controller" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.908348 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerName="ovnkube-controller" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.911667 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.956961 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-node-log\") pod \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957039 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-log-socket\") pod \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957073 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-run-openvswitch\") pod \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957067 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-node-log" (OuterVolumeSpecName: "node-log") pod "f9a98683-f9ac-45d4-9312-43ebf25bdb52" (UID: "f9a98683-f9ac-45d4-9312-43ebf25bdb52"). InnerVolumeSpecName "node-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957101 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-var-lib-cni-networks-ovn-kubernetes\") pod \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957124 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-var-lib-openvswitch\") pod \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957153 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xp458\" (UniqueName: \"kubernetes.io/projected/f9a98683-f9ac-45d4-9312-43ebf25bdb52-kube-api-access-xp458\") pod \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957191 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f9a98683-f9ac-45d4-9312-43ebf25bdb52-ovnkube-script-lib\") pod \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957259 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-systemd-units\") pod \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957286 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-cni-bin\") pod \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957342 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f9a98683-f9ac-45d4-9312-43ebf25bdb52-ovnkube-config\") pod \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957195 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "f9a98683-f9ac-45d4-9312-43ebf25bdb52" (UID: "f9a98683-f9ac-45d4-9312-43ebf25bdb52"). InnerVolumeSpecName "var-lib-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957389 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-etc-openvswitch\") pod \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957428 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-run-ovn-kubernetes\") pod \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957456 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-cni-netd\") pod \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957512 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f9a98683-f9ac-45d4-9312-43ebf25bdb52-ovn-node-metrics-cert\") pod \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957545 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-run-netns\") pod \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957573 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f9a98683-f9ac-45d4-9312-43ebf25bdb52-env-overrides\") pod \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957594 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-slash\") pod \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957615 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-run-systemd\") pod \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957642 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-run-ovn\") pod \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\" (UID: \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957671 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-kubelet\") pod \"f9a98683-f9ac-45d4-9312-43ebf25bdb52\" (UID: 
\"f9a98683-f9ac-45d4-9312-43ebf25bdb52\") " Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957189 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "f9a98683-f9ac-45d4-9312-43ebf25bdb52" (UID: "f9a98683-f9ac-45d4-9312-43ebf25bdb52"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957227 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-log-socket" (OuterVolumeSpecName: "log-socket") pod "f9a98683-f9ac-45d4-9312-43ebf25bdb52" (UID: "f9a98683-f9ac-45d4-9312-43ebf25bdb52"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957273 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "f9a98683-f9ac-45d4-9312-43ebf25bdb52" (UID: "f9a98683-f9ac-45d4-9312-43ebf25bdb52"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957370 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "f9a98683-f9ac-45d4-9312-43ebf25bdb52" (UID: "f9a98683-f9ac-45d4-9312-43ebf25bdb52"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957418 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "f9a98683-f9ac-45d4-9312-43ebf25bdb52" (UID: "f9a98683-f9ac-45d4-9312-43ebf25bdb52"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957454 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "f9a98683-f9ac-45d4-9312-43ebf25bdb52" (UID: "f9a98683-f9ac-45d4-9312-43ebf25bdb52"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957657 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f9a98683-f9ac-45d4-9312-43ebf25bdb52-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "f9a98683-f9ac-45d4-9312-43ebf25bdb52" (UID: "f9a98683-f9ac-45d4-9312-43ebf25bdb52"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957681 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "f9a98683-f9ac-45d4-9312-43ebf25bdb52" (UID: "f9a98683-f9ac-45d4-9312-43ebf25bdb52"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957754 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "f9a98683-f9ac-45d4-9312-43ebf25bdb52" (UID: "f9a98683-f9ac-45d4-9312-43ebf25bdb52"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957772 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "f9a98683-f9ac-45d4-9312-43ebf25bdb52" (UID: "f9a98683-f9ac-45d4-9312-43ebf25bdb52"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957935 5003 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-systemd-units\") on node \"crc\" DevicePath \"\"" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957957 5003 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-cni-bin\") on node \"crc\" DevicePath \"\"" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957969 5003 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957980 5003 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.957992 5003 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-cni-netd\") on node \"crc\" DevicePath \"\"" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.958003 5003 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-run-netns\") on node \"crc\" DevicePath \"\"" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.958015 5003 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-node-log\") on node \"crc\" DevicePath \"\"" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.958026 5003 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-log-socket\") on node \"crc\" DevicePath \"\"" Jan 26 
10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.958037 5003 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-run-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.958048 5003 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.958061 5003 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.958072 5003 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f9a98683-f9ac-45d4-9312-43ebf25bdb52-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.958092 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f9a98683-f9ac-45d4-9312-43ebf25bdb52-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "f9a98683-f9ac-45d4-9312-43ebf25bdb52" (UID: "f9a98683-f9ac-45d4-9312-43ebf25bdb52"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.958116 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "f9a98683-f9ac-45d4-9312-43ebf25bdb52" (UID: "f9a98683-f9ac-45d4-9312-43ebf25bdb52"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.958156 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-slash" (OuterVolumeSpecName: "host-slash") pod "f9a98683-f9ac-45d4-9312-43ebf25bdb52" (UID: "f9a98683-f9ac-45d4-9312-43ebf25bdb52"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.958336 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "f9a98683-f9ac-45d4-9312-43ebf25bdb52" (UID: "f9a98683-f9ac-45d4-9312-43ebf25bdb52"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.958816 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f9a98683-f9ac-45d4-9312-43ebf25bdb52-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "f9a98683-f9ac-45d4-9312-43ebf25bdb52" (UID: "f9a98683-f9ac-45d4-9312-43ebf25bdb52"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.963005 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9a98683-f9ac-45d4-9312-43ebf25bdb52-kube-api-access-xp458" (OuterVolumeSpecName: "kube-api-access-xp458") pod "f9a98683-f9ac-45d4-9312-43ebf25bdb52" (UID: "f9a98683-f9ac-45d4-9312-43ebf25bdb52"). InnerVolumeSpecName "kube-api-access-xp458". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.963163 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9a98683-f9ac-45d4-9312-43ebf25bdb52-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "f9a98683-f9ac-45d4-9312-43ebf25bdb52" (UID: "f9a98683-f9ac-45d4-9312-43ebf25bdb52"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:53:09 crc kubenswrapper[5003]: I0126 10:53:09.970208 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "f9a98683-f9ac-45d4-9312-43ebf25bdb52" (UID: "f9a98683-f9ac-45d4-9312-43ebf25bdb52"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.059215 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-host-run-ovn-kubernetes\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.059268 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-run-openvswitch\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.059302 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-run-systemd\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.059320 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l84wh\" (UniqueName: \"kubernetes.io/projected/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-kube-api-access-l84wh\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.059340 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-host-slash\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.059354 5003 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-host-cni-bin\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.059372 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-ovnkube-config\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.059388 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-host-cni-netd\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.059531 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-node-log\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.059605 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-etc-openvswitch\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.059626 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-host-run-netns\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.059689 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-ovnkube-script-lib\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.059714 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-systemd-units\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.059736 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-run-ovn\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: 
I0126 10:53:10.059758 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-env-overrides\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.059784 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-var-lib-openvswitch\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.059805 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-host-kubelet\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.059962 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-log-socket\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.060108 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.060259 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-ovn-node-metrics-cert\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.060441 5003 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-kubelet\") on node \"crc\" DevicePath \"\"" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.060471 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xp458\" (UniqueName: \"kubernetes.io/projected/f9a98683-f9ac-45d4-9312-43ebf25bdb52-kube-api-access-xp458\") on node \"crc\" DevicePath \"\"" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.060490 5003 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f9a98683-f9ac-45d4-9312-43ebf25bdb52-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.060507 5003 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f9a98683-f9ac-45d4-9312-43ebf25bdb52-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 26 10:53:10 crc 
kubenswrapper[5003]: I0126 10:53:10.060519 5003 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f9a98683-f9ac-45d4-9312-43ebf25bdb52-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.060530 5003 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-host-slash\") on node \"crc\" DevicePath \"\"" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.060539 5003 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-run-systemd\") on node \"crc\" DevicePath \"\"" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.060550 5003 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f9a98683-f9ac-45d4-9312-43ebf25bdb52-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.161643 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-env-overrides\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.161706 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-var-lib-openvswitch\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.161734 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-host-kubelet\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.161768 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-log-socket\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.161796 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.161818 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-ovn-node-metrics-cert\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.161852 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-host-run-ovn-kubernetes\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.161891 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-host-kubelet\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.161902 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-run-openvswitch\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.161935 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-run-openvswitch\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.161966 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-run-systemd\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.162015 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l84wh\" (UniqueName: \"kubernetes.io/projected/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-kube-api-access-l84wh\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.162020 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.162043 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-var-lib-openvswitch\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.162099 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-host-slash\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.162113 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: 
\"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-log-socket\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.162052 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-host-slash\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.162134 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-run-systemd\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.162181 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-host-cni-bin\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.162203 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-host-run-ovn-kubernetes\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.162213 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-ovnkube-config\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.162318 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-host-cni-netd\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.162361 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-node-log\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.162392 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-etc-openvswitch\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.162413 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-host-run-netns\") pod \"ovnkube-node-f65b8\" (UID: 
\"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.162433 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-ovnkube-script-lib\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.162458 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-systemd-units\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.162482 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-run-ovn\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.162561 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-run-ovn\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.162575 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-etc-openvswitch\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.162607 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-host-run-netns\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.162625 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-host-cni-netd\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.162659 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-node-log\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.162696 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-host-cni-bin\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.162728 5003 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-systemd-units\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.163387 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-ovnkube-config\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.163743 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-env-overrides\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.164131 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-ovnkube-script-lib\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.167479 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-ovn-node-metrics-cert\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.184864 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l84wh\" (UniqueName: \"kubernetes.io/projected/a05d3b7c-c79f-46cb-9adb-c30b7d0006da-kube-api-access-l84wh\") pod \"ovnkube-node-f65b8\" (UID: \"a05d3b7c-c79f-46cb-9adb-c30b7d0006da\") " pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.233184 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.541006 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q24zl_f9a98683-f9ac-45d4-9312-43ebf25bdb52/ovnkube-controller/3.log" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.545789 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q24zl_f9a98683-f9ac-45d4-9312-43ebf25bdb52/ovn-acl-logging/0.log" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.546974 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q24zl_f9a98683-f9ac-45d4-9312-43ebf25bdb52/ovn-controller/0.log" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.547793 5003 generic.go:334] "Generic (PLEG): container finished" podID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerID="6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489" exitCode=0 Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.547851 5003 generic.go:334] "Generic (PLEG): container finished" podID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerID="1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec" exitCode=0 Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.547864 5003 generic.go:334] "Generic (PLEG): container finished" podID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerID="c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19" exitCode=0 Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.547876 5003 generic.go:334] "Generic (PLEG): container finished" podID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerID="7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34" exitCode=0 Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.547887 5003 generic.go:334] "Generic (PLEG): container finished" podID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerID="f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2" exitCode=0 Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.547901 5003 generic.go:334] "Generic (PLEG): container finished" podID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerID="b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68" exitCode=0 Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.547913 5003 generic.go:334] "Generic (PLEG): container finished" podID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerID="03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451" exitCode=143 Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.547924 5003 generic.go:334] "Generic (PLEG): container finished" podID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" containerID="33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f" exitCode=143 Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.547961 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548242 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" event={"ID":"f9a98683-f9ac-45d4-9312-43ebf25bdb52","Type":"ContainerDied","Data":"6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548319 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" event={"ID":"f9a98683-f9ac-45d4-9312-43ebf25bdb52","Type":"ContainerDied","Data":"1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548337 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" event={"ID":"f9a98683-f9ac-45d4-9312-43ebf25bdb52","Type":"ContainerDied","Data":"c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548353 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" event={"ID":"f9a98683-f9ac-45d4-9312-43ebf25bdb52","Type":"ContainerDied","Data":"7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548380 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" event={"ID":"f9a98683-f9ac-45d4-9312-43ebf25bdb52","Type":"ContainerDied","Data":"f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548393 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" event={"ID":"f9a98683-f9ac-45d4-9312-43ebf25bdb52","Type":"ContainerDied","Data":"b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548414 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548431 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548440 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548448 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548456 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548466 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548475 5003 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548484 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548495 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548505 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" event={"ID":"f9a98683-f9ac-45d4-9312-43ebf25bdb52","Type":"ContainerDied","Data":"03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548518 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548528 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548538 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548547 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548557 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548566 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548574 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548583 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548592 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548601 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548616 5003 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" event={"ID":"f9a98683-f9ac-45d4-9312-43ebf25bdb52","Type":"ContainerDied","Data":"33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548629 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548638 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548649 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548657 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548665 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548672 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548679 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548687 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548694 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548702 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548711 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q24zl" event={"ID":"f9a98683-f9ac-45d4-9312-43ebf25bdb52","Type":"ContainerDied","Data":"0f88ecab4e09e11a3e128239077bcbcaa6341f139f9e872a006c907d8c619d28"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548722 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548731 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9"} 
Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548739 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548748 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548756 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548763 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548773 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548780 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548787 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548795 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.548818 5003 scope.go:117] "RemoveContainer" containerID="6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.555331 5003 generic.go:334] "Generic (PLEG): container finished" podID="a05d3b7c-c79f-46cb-9adb-c30b7d0006da" containerID="7933188be0d40525d91e0b75e671cde886683d5ae630be8fbeba102386afe967" exitCode=0 Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.555573 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" event={"ID":"a05d3b7c-c79f-46cb-9adb-c30b7d0006da","Type":"ContainerDied","Data":"7933188be0d40525d91e0b75e671cde886683d5ae630be8fbeba102386afe967"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.555683 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" event={"ID":"a05d3b7c-c79f-46cb-9adb-c30b7d0006da","Type":"ContainerStarted","Data":"8a79fd719ec5ff3e2db9f0af550aa3b67a9dc523dfcce53d015b9aa63802d704"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.560864 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vpb6l_9a2a5d08-c449-45c6-8e1f-340c076422db/kube-multus/2.log" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.561776 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vpb6l_9a2a5d08-c449-45c6-8e1f-340c076422db/kube-multus/1.log" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.561894 
5003 generic.go:334] "Generic (PLEG): container finished" podID="9a2a5d08-c449-45c6-8e1f-340c076422db" containerID="cd218687710b6fabb66404835f025c68b5ee6af1e63c65283186b8190108f4bb" exitCode=2 Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.561945 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vpb6l" event={"ID":"9a2a5d08-c449-45c6-8e1f-340c076422db","Type":"ContainerDied","Data":"cd218687710b6fabb66404835f025c68b5ee6af1e63c65283186b8190108f4bb"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.561979 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"385aa2f7573bd1359a2745d96b174f9df281bd6c5d661a0dfb2e77084cfa011f"} Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.562663 5003 scope.go:117] "RemoveContainer" containerID="cd218687710b6fabb66404835f025c68b5ee6af1e63c65283186b8190108f4bb" Jan 26 10:53:10 crc kubenswrapper[5003]: E0126 10:53:10.563093 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-vpb6l_openshift-multus(9a2a5d08-c449-45c6-8e1f-340c076422db)\"" pod="openshift-multus/multus-vpb6l" podUID="9a2a5d08-c449-45c6-8e1f-340c076422db" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.605442 5003 scope.go:117] "RemoveContainer" containerID="2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.631184 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-q24zl"] Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.637623 5003 scope.go:117] "RemoveContainer" containerID="1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.640754 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-q24zl"] Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.664605 5003 scope.go:117] "RemoveContainer" containerID="c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.683517 5003 scope.go:117] "RemoveContainer" containerID="7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.700677 5003 scope.go:117] "RemoveContainer" containerID="f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.727192 5003 scope.go:117] "RemoveContainer" containerID="b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.750693 5003 scope.go:117] "RemoveContainer" containerID="03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.770117 5003 scope.go:117] "RemoveContainer" containerID="33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.786205 5003 scope.go:117] "RemoveContainer" containerID="be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.803668 5003 scope.go:117] "RemoveContainer" containerID="6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489" Jan 26 10:53:10 crc kubenswrapper[5003]: E0126 10:53:10.804488 5003 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489\": container with ID starting with 6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489 not found: ID does not exist" containerID="6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.804567 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489"} err="failed to get container status \"6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489\": rpc error: code = NotFound desc = could not find container \"6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489\": container with ID starting with 6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489 not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.804604 5003 scope.go:117] "RemoveContainer" containerID="2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9" Jan 26 10:53:10 crc kubenswrapper[5003]: E0126 10:53:10.804977 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9\": container with ID starting with 2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9 not found: ID does not exist" containerID="2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.805003 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9"} err="failed to get container status \"2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9\": rpc error: code = NotFound desc = could not find container \"2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9\": container with ID starting with 2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9 not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.805041 5003 scope.go:117] "RemoveContainer" containerID="1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec" Jan 26 10:53:10 crc kubenswrapper[5003]: E0126 10:53:10.805420 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec\": container with ID starting with 1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec not found: ID does not exist" containerID="1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.805465 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec"} err="failed to get container status \"1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec\": rpc error: code = NotFound desc = could not find container \"1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec\": container with ID starting with 1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.805486 5003 scope.go:117] "RemoveContainer" 
containerID="c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19" Jan 26 10:53:10 crc kubenswrapper[5003]: E0126 10:53:10.806189 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19\": container with ID starting with c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19 not found: ID does not exist" containerID="c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.806238 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19"} err="failed to get container status \"c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19\": rpc error: code = NotFound desc = could not find container \"c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19\": container with ID starting with c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19 not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.806256 5003 scope.go:117] "RemoveContainer" containerID="7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34" Jan 26 10:53:10 crc kubenswrapper[5003]: E0126 10:53:10.806632 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34\": container with ID starting with 7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34 not found: ID does not exist" containerID="7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.806682 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34"} err="failed to get container status \"7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34\": rpc error: code = NotFound desc = could not find container \"7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34\": container with ID starting with 7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34 not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.806700 5003 scope.go:117] "RemoveContainer" containerID="f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2" Jan 26 10:53:10 crc kubenswrapper[5003]: E0126 10:53:10.807049 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2\": container with ID starting with f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2 not found: ID does not exist" containerID="f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.807077 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2"} err="failed to get container status \"f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2\": rpc error: code = NotFound desc = could not find container \"f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2\": container with ID starting with 
f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2 not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.807154 5003 scope.go:117] "RemoveContainer" containerID="b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68" Jan 26 10:53:10 crc kubenswrapper[5003]: E0126 10:53:10.807469 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68\": container with ID starting with b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68 not found: ID does not exist" containerID="b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.807546 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68"} err="failed to get container status \"b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68\": rpc error: code = NotFound desc = could not find container \"b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68\": container with ID starting with b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68 not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.807565 5003 scope.go:117] "RemoveContainer" containerID="03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451" Jan 26 10:53:10 crc kubenswrapper[5003]: E0126 10:53:10.807848 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451\": container with ID starting with 03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451 not found: ID does not exist" containerID="03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.807876 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451"} err="failed to get container status \"03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451\": rpc error: code = NotFound desc = could not find container \"03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451\": container with ID starting with 03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451 not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.807895 5003 scope.go:117] "RemoveContainer" containerID="33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f" Jan 26 10:53:10 crc kubenswrapper[5003]: E0126 10:53:10.808132 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f\": container with ID starting with 33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f not found: ID does not exist" containerID="33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.808179 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f"} err="failed to get container status \"33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f\": rpc 
error: code = NotFound desc = could not find container \"33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f\": container with ID starting with 33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.808196 5003 scope.go:117] "RemoveContainer" containerID="be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff" Jan 26 10:53:10 crc kubenswrapper[5003]: E0126 10:53:10.808519 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\": container with ID starting with be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff not found: ID does not exist" containerID="be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.808581 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff"} err="failed to get container status \"be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\": rpc error: code = NotFound desc = could not find container \"be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\": container with ID starting with be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.808603 5003 scope.go:117] "RemoveContainer" containerID="6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.809190 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489"} err="failed to get container status \"6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489\": rpc error: code = NotFound desc = could not find container \"6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489\": container with ID starting with 6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489 not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.809258 5003 scope.go:117] "RemoveContainer" containerID="2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.809521 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9"} err="failed to get container status \"2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9\": rpc error: code = NotFound desc = could not find container \"2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9\": container with ID starting with 2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9 not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.809547 5003 scope.go:117] "RemoveContainer" containerID="1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.809751 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec"} err="failed to get container status \"1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec\": rpc 
error: code = NotFound desc = could not find container \"1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec\": container with ID starting with 1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.809822 5003 scope.go:117] "RemoveContainer" containerID="c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.810142 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19"} err="failed to get container status \"c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19\": rpc error: code = NotFound desc = could not find container \"c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19\": container with ID starting with c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19 not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.810182 5003 scope.go:117] "RemoveContainer" containerID="7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.810462 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34"} err="failed to get container status \"7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34\": rpc error: code = NotFound desc = could not find container \"7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34\": container with ID starting with 7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34 not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.810536 5003 scope.go:117] "RemoveContainer" containerID="f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.810862 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2"} err="failed to get container status \"f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2\": rpc error: code = NotFound desc = could not find container \"f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2\": container with ID starting with f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2 not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.810889 5003 scope.go:117] "RemoveContainer" containerID="b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.811255 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68"} err="failed to get container status \"b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68\": rpc error: code = NotFound desc = could not find container \"b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68\": container with ID starting with b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68 not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.811319 5003 scope.go:117] "RemoveContainer" containerID="03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451" Jan 26 10:53:10 crc 
kubenswrapper[5003]: I0126 10:53:10.811930 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451"} err="failed to get container status \"03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451\": rpc error: code = NotFound desc = could not find container \"03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451\": container with ID starting with 03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451 not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.811976 5003 scope.go:117] "RemoveContainer" containerID="33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.812174 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f"} err="failed to get container status \"33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f\": rpc error: code = NotFound desc = could not find container \"33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f\": container with ID starting with 33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.812197 5003 scope.go:117] "RemoveContainer" containerID="be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.812503 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff"} err="failed to get container status \"be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\": rpc error: code = NotFound desc = could not find container \"be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\": container with ID starting with be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.812535 5003 scope.go:117] "RemoveContainer" containerID="6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.813470 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489"} err="failed to get container status \"6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489\": rpc error: code = NotFound desc = could not find container \"6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489\": container with ID starting with 6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489 not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.813515 5003 scope.go:117] "RemoveContainer" containerID="2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.813776 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9"} err="failed to get container status \"2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9\": rpc error: code = NotFound desc = could not find container \"2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9\": container with ID 
starting with 2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9 not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.813803 5003 scope.go:117] "RemoveContainer" containerID="1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.814034 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec"} err="failed to get container status \"1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec\": rpc error: code = NotFound desc = could not find container \"1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec\": container with ID starting with 1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.814110 5003 scope.go:117] "RemoveContainer" containerID="c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.814390 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19"} err="failed to get container status \"c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19\": rpc error: code = NotFound desc = could not find container \"c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19\": container with ID starting with c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19 not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.814416 5003 scope.go:117] "RemoveContainer" containerID="7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.814682 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34"} err="failed to get container status \"7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34\": rpc error: code = NotFound desc = could not find container \"7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34\": container with ID starting with 7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34 not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.814710 5003 scope.go:117] "RemoveContainer" containerID="f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.815003 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2"} err="failed to get container status \"f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2\": rpc error: code = NotFound desc = could not find container \"f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2\": container with ID starting with f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2 not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.815129 5003 scope.go:117] "RemoveContainer" containerID="b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.815568 5003 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68"} err="failed to get container status \"b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68\": rpc error: code = NotFound desc = could not find container \"b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68\": container with ID starting with b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68 not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.817412 5003 scope.go:117] "RemoveContainer" containerID="03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.817729 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451"} err="failed to get container status \"03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451\": rpc error: code = NotFound desc = could not find container \"03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451\": container with ID starting with 03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451 not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.817761 5003 scope.go:117] "RemoveContainer" containerID="33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.818063 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f"} err="failed to get container status \"33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f\": rpc error: code = NotFound desc = could not find container \"33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f\": container with ID starting with 33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.818096 5003 scope.go:117] "RemoveContainer" containerID="be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.818360 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff"} err="failed to get container status \"be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\": rpc error: code = NotFound desc = could not find container \"be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\": container with ID starting with be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.818385 5003 scope.go:117] "RemoveContainer" containerID="6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.818653 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489"} err="failed to get container status \"6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489\": rpc error: code = NotFound desc = could not find container \"6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489\": container with ID starting with 6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489 not found: ID does not exist" Jan 
26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.818705 5003 scope.go:117] "RemoveContainer" containerID="2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.818965 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9"} err="failed to get container status \"2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9\": rpc error: code = NotFound desc = could not find container \"2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9\": container with ID starting with 2c85b0326d315a0fad0f2c0696cfeda78e174049bcdc6916e5ffe039222661c9 not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.818990 5003 scope.go:117] "RemoveContainer" containerID="1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.819231 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec"} err="failed to get container status \"1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec\": rpc error: code = NotFound desc = could not find container \"1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec\": container with ID starting with 1c2c96ef0f029ef640725ce8cdf8c3bea61c6e89faf04bd8c06459b6d27e38ec not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.819361 5003 scope.go:117] "RemoveContainer" containerID="c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.819611 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19"} err="failed to get container status \"c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19\": rpc error: code = NotFound desc = could not find container \"c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19\": container with ID starting with c242cd8949aa60d834c7e0a5bd0885ef57feaeabb08233da2cc49dab309a4d19 not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.819633 5003 scope.go:117] "RemoveContainer" containerID="7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.820138 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34"} err="failed to get container status \"7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34\": rpc error: code = NotFound desc = could not find container \"7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34\": container with ID starting with 7d9b9e76914a1633622ca42e19e7cc3298966bb1e35a6a334e7f5c4d87a87e34 not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.820168 5003 scope.go:117] "RemoveContainer" containerID="f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.820540 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2"} err="failed to get container status 
\"f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2\": rpc error: code = NotFound desc = could not find container \"f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2\": container with ID starting with f6b02f84aaf5d235c41570f0398f7c90f4a2b9d05fbc02e3a8106f45d1da78d2 not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.820589 5003 scope.go:117] "RemoveContainer" containerID="b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.820892 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68"} err="failed to get container status \"b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68\": rpc error: code = NotFound desc = could not find container \"b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68\": container with ID starting with b07407d85c4bb88e029f32e06757899383a4d1e2bb2d79d6b586a1f47670eb68 not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.820919 5003 scope.go:117] "RemoveContainer" containerID="03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.821238 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451"} err="failed to get container status \"03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451\": rpc error: code = NotFound desc = could not find container \"03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451\": container with ID starting with 03df19f4ceaf13330f56fd2eed3b988c1b96070ba8276fa1ea801cc99f225451 not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.821301 5003 scope.go:117] "RemoveContainer" containerID="33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.821612 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f"} err="failed to get container status \"33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f\": rpc error: code = NotFound desc = could not find container \"33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f\": container with ID starting with 33840af9192a169d0cded4e9957ebb475a2aba103eac4ae43b5e7c9da423547f not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.821637 5003 scope.go:117] "RemoveContainer" containerID="be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.821911 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff"} err="failed to get container status \"be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\": rpc error: code = NotFound desc = could not find container \"be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff\": container with ID starting with be29584f3e77913a153b3a36988c2f71cc9d65d4cad3932f4b9707d30ed68eff not found: ID does not exist" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.821955 5003 scope.go:117] "RemoveContainer" 
containerID="6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489" Jan 26 10:53:10 crc kubenswrapper[5003]: I0126 10:53:10.822241 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489"} err="failed to get container status \"6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489\": rpc error: code = NotFound desc = could not find container \"6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489\": container with ID starting with 6a39f11aec371f15707816d1edc1575d8f52de2ac2bc82a32710265afd8ba489 not found: ID does not exist" Jan 26 10:53:11 crc kubenswrapper[5003]: I0126 10:53:11.010625 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9a98683-f9ac-45d4-9312-43ebf25bdb52" path="/var/lib/kubelet/pods/f9a98683-f9ac-45d4-9312-43ebf25bdb52/volumes" Jan 26 10:53:11 crc kubenswrapper[5003]: I0126 10:53:11.572998 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" event={"ID":"a05d3b7c-c79f-46cb-9adb-c30b7d0006da","Type":"ContainerStarted","Data":"fc4e3e9636a78deef80a1ba3b7c9f0b0f6b74cc76a91da4cbd29778093bbd852"} Jan 26 10:53:11 crc kubenswrapper[5003]: I0126 10:53:11.573324 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" event={"ID":"a05d3b7c-c79f-46cb-9adb-c30b7d0006da","Type":"ContainerStarted","Data":"9acafd07a4abfbb3aaa34352dfd734deb43d02194d85ac38f6581a423efbaa34"} Jan 26 10:53:11 crc kubenswrapper[5003]: I0126 10:53:11.573339 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" event={"ID":"a05d3b7c-c79f-46cb-9adb-c30b7d0006da","Type":"ContainerStarted","Data":"2e824ca69ffaf841a8ea8815b6c6afb3686d5f7b45c16d6873780fa4d16727f3"} Jan 26 10:53:11 crc kubenswrapper[5003]: I0126 10:53:11.573349 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" event={"ID":"a05d3b7c-c79f-46cb-9adb-c30b7d0006da","Type":"ContainerStarted","Data":"50569d8b7328b7a1344d508754a698dc2c019dce2ffd7d25470b27a15d316736"} Jan 26 10:53:11 crc kubenswrapper[5003]: I0126 10:53:11.573357 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" event={"ID":"a05d3b7c-c79f-46cb-9adb-c30b7d0006da","Type":"ContainerStarted","Data":"11b0b0e73a100297e083459d4e5a317bc983fdc491666ee000a4496736574d8b"} Jan 26 10:53:11 crc kubenswrapper[5003]: I0126 10:53:11.573366 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" event={"ID":"a05d3b7c-c79f-46cb-9adb-c30b7d0006da","Type":"ContainerStarted","Data":"02f1994557a8b1e016a69ac7f04ac252e442a358fc03314b10c30bc3cf0483ec"} Jan 26 10:53:14 crc kubenswrapper[5003]: I0126 10:53:14.597158 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" event={"ID":"a05d3b7c-c79f-46cb-9adb-c30b7d0006da","Type":"ContainerStarted","Data":"b66eec7443f6ea01d8b1ba5ac72b03985cb4f0c23e7c9a618da3f4c686f9a7f7"} Jan 26 10:53:16 crc kubenswrapper[5003]: I0126 10:53:16.613092 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" event={"ID":"a05d3b7c-c79f-46cb-9adb-c30b7d0006da","Type":"ContainerStarted","Data":"029dd8d3b471fe7e01668ac8c47b81f7ac5028df21015b4ad4167e379a12c4e9"} Jan 26 10:53:17 crc kubenswrapper[5003]: I0126 10:53:17.618368 5003 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:17 crc kubenswrapper[5003]: I0126 10:53:17.618585 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:17 crc kubenswrapper[5003]: I0126 10:53:17.618673 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:17 crc kubenswrapper[5003]: I0126 10:53:17.655050 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:17 crc kubenswrapper[5003]: I0126 10:53:17.655100 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" podStartSLOduration=8.655082627 podStartE2EDuration="8.655082627s" podCreationTimestamp="2026-01-26 10:53:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:53:17.652948165 +0000 UTC m=+613.194173726" watchObservedRunningTime="2026-01-26 10:53:17.655082627 +0000 UTC m=+613.196308178" Jan 26 10:53:17 crc kubenswrapper[5003]: I0126 10:53:17.658881 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:24 crc kubenswrapper[5003]: I0126 10:53:24.001376 5003 scope.go:117] "RemoveContainer" containerID="cd218687710b6fabb66404835f025c68b5ee6af1e63c65283186b8190108f4bb" Jan 26 10:53:24 crc kubenswrapper[5003]: E0126 10:53:24.002134 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-vpb6l_openshift-multus(9a2a5d08-c449-45c6-8e1f-340c076422db)\"" pod="openshift-multus/multus-vpb6l" podUID="9a2a5d08-c449-45c6-8e1f-340c076422db" Jan 26 10:53:33 crc kubenswrapper[5003]: I0126 10:53:33.320721 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q"] Jan 26 10:53:33 crc kubenswrapper[5003]: I0126 10:53:33.322346 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q" Jan 26 10:53:33 crc kubenswrapper[5003]: I0126 10:53:33.324438 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 26 10:53:33 crc kubenswrapper[5003]: I0126 10:53:33.338418 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q"] Jan 26 10:53:33 crc kubenswrapper[5003]: I0126 10:53:33.462720 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/57d5e63e-8e5c-4a9f-ac8b-175ca05409ba-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q\" (UID: \"57d5e63e-8e5c-4a9f-ac8b-175ca05409ba\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q" Jan 26 10:53:33 crc kubenswrapper[5003]: I0126 10:53:33.462770 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/57d5e63e-8e5c-4a9f-ac8b-175ca05409ba-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q\" (UID: \"57d5e63e-8e5c-4a9f-ac8b-175ca05409ba\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q" Jan 26 10:53:33 crc kubenswrapper[5003]: I0126 10:53:33.462809 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfhfp\" (UniqueName: \"kubernetes.io/projected/57d5e63e-8e5c-4a9f-ac8b-175ca05409ba-kube-api-access-jfhfp\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q\" (UID: \"57d5e63e-8e5c-4a9f-ac8b-175ca05409ba\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q" Jan 26 10:53:33 crc kubenswrapper[5003]: I0126 10:53:33.563557 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/57d5e63e-8e5c-4a9f-ac8b-175ca05409ba-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q\" (UID: \"57d5e63e-8e5c-4a9f-ac8b-175ca05409ba\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q" Jan 26 10:53:33 crc kubenswrapper[5003]: I0126 10:53:33.563603 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/57d5e63e-8e5c-4a9f-ac8b-175ca05409ba-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q\" (UID: \"57d5e63e-8e5c-4a9f-ac8b-175ca05409ba\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q" Jan 26 10:53:33 crc kubenswrapper[5003]: I0126 10:53:33.563636 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfhfp\" (UniqueName: \"kubernetes.io/projected/57d5e63e-8e5c-4a9f-ac8b-175ca05409ba-kube-api-access-jfhfp\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q\" (UID: \"57d5e63e-8e5c-4a9f-ac8b-175ca05409ba\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q" Jan 26 10:53:33 crc kubenswrapper[5003]: I0126 10:53:33.564461 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/57d5e63e-8e5c-4a9f-ac8b-175ca05409ba-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q\" (UID: \"57d5e63e-8e5c-4a9f-ac8b-175ca05409ba\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q" Jan 26 10:53:33 crc kubenswrapper[5003]: I0126 10:53:33.564584 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/57d5e63e-8e5c-4a9f-ac8b-175ca05409ba-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q\" (UID: \"57d5e63e-8e5c-4a9f-ac8b-175ca05409ba\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q" Jan 26 10:53:33 crc kubenswrapper[5003]: I0126 10:53:33.583642 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfhfp\" (UniqueName: \"kubernetes.io/projected/57d5e63e-8e5c-4a9f-ac8b-175ca05409ba-kube-api-access-jfhfp\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q\" (UID: \"57d5e63e-8e5c-4a9f-ac8b-175ca05409ba\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q" Jan 26 10:53:33 crc kubenswrapper[5003]: I0126 10:53:33.640345 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q" Jan 26 10:53:33 crc kubenswrapper[5003]: E0126 10:53:33.667603 5003 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q_openshift-marketplace_57d5e63e-8e5c-4a9f-ac8b-175ca05409ba_0(f0daa0719581ba1781d9898dac9c6de5f56904733c690eeaecb2dc83036d342f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 26 10:53:33 crc kubenswrapper[5003]: E0126 10:53:33.667666 5003 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q_openshift-marketplace_57d5e63e-8e5c-4a9f-ac8b-175ca05409ba_0(f0daa0719581ba1781d9898dac9c6de5f56904733c690eeaecb2dc83036d342f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q" Jan 26 10:53:33 crc kubenswrapper[5003]: E0126 10:53:33.667685 5003 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q_openshift-marketplace_57d5e63e-8e5c-4a9f-ac8b-175ca05409ba_0(f0daa0719581ba1781d9898dac9c6de5f56904733c690eeaecb2dc83036d342f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q" Jan 26 10:53:33 crc kubenswrapper[5003]: E0126 10:53:33.667736 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q_openshift-marketplace(57d5e63e-8e5c-4a9f-ac8b-175ca05409ba)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q_openshift-marketplace(57d5e63e-8e5c-4a9f-ac8b-175ca05409ba)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q_openshift-marketplace_57d5e63e-8e5c-4a9f-ac8b-175ca05409ba_0(f0daa0719581ba1781d9898dac9c6de5f56904733c690eeaecb2dc83036d342f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q" podUID="57d5e63e-8e5c-4a9f-ac8b-175ca05409ba" Jan 26 10:53:33 crc kubenswrapper[5003]: I0126 10:53:33.708349 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q" Jan 26 10:53:33 crc kubenswrapper[5003]: I0126 10:53:33.709622 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q" Jan 26 10:53:33 crc kubenswrapper[5003]: E0126 10:53:33.739153 5003 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q_openshift-marketplace_57d5e63e-8e5c-4a9f-ac8b-175ca05409ba_0(8ae447619c71e93d6d62e72139b8625cea34385dfd7777efc2ce3e58c15d801d): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 26 10:53:33 crc kubenswrapper[5003]: E0126 10:53:33.739214 5003 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q_openshift-marketplace_57d5e63e-8e5c-4a9f-ac8b-175ca05409ba_0(8ae447619c71e93d6d62e72139b8625cea34385dfd7777efc2ce3e58c15d801d): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q" Jan 26 10:53:33 crc kubenswrapper[5003]: E0126 10:53:33.739243 5003 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q_openshift-marketplace_57d5e63e-8e5c-4a9f-ac8b-175ca05409ba_0(8ae447619c71e93d6d62e72139b8625cea34385dfd7777efc2ce3e58c15d801d): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q" Jan 26 10:53:33 crc kubenswrapper[5003]: E0126 10:53:33.739316 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q_openshift-marketplace(57d5e63e-8e5c-4a9f-ac8b-175ca05409ba)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q_openshift-marketplace(57d5e63e-8e5c-4a9f-ac8b-175ca05409ba)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q_openshift-marketplace_57d5e63e-8e5c-4a9f-ac8b-175ca05409ba_0(8ae447619c71e93d6d62e72139b8625cea34385dfd7777efc2ce3e58c15d801d): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q" podUID="57d5e63e-8e5c-4a9f-ac8b-175ca05409ba" Jan 26 10:53:36 crc kubenswrapper[5003]: I0126 10:53:36.002656 5003 scope.go:117] "RemoveContainer" containerID="cd218687710b6fabb66404835f025c68b5ee6af1e63c65283186b8190108f4bb" Jan 26 10:53:36 crc kubenswrapper[5003]: I0126 10:53:36.722924 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vpb6l_9a2a5d08-c449-45c6-8e1f-340c076422db/kube-multus/2.log" Jan 26 10:53:36 crc kubenswrapper[5003]: I0126 10:53:36.723394 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vpb6l_9a2a5d08-c449-45c6-8e1f-340c076422db/kube-multus/1.log" Jan 26 10:53:36 crc kubenswrapper[5003]: I0126 10:53:36.723461 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vpb6l" event={"ID":"9a2a5d08-c449-45c6-8e1f-340c076422db","Type":"ContainerStarted","Data":"0cff093313aa2051ee5d169f7b7f19a5de02d5d2d5649d1f2a35f91e647231c7"} Jan 26 10:53:40 crc kubenswrapper[5003]: I0126 10:53:40.252136 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-f65b8" Jan 26 10:53:47 crc kubenswrapper[5003]: I0126 10:53:47.000895 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q" Jan 26 10:53:47 crc kubenswrapper[5003]: I0126 10:53:47.002006 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q" Jan 26 10:53:47 crc kubenswrapper[5003]: I0126 10:53:47.255369 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q"] Jan 26 10:53:47 crc kubenswrapper[5003]: W0126 10:53:47.262736 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod57d5e63e_8e5c_4a9f_ac8b_175ca05409ba.slice/crio-f5bfdf22d4ed36e907861c803e5c2d0430c4c03b925eb1aa3fb1abeecf7991c5 WatchSource:0}: Error finding container f5bfdf22d4ed36e907861c803e5c2d0430c4c03b925eb1aa3fb1abeecf7991c5: Status 404 returned error can't find the container with id f5bfdf22d4ed36e907861c803e5c2d0430c4c03b925eb1aa3fb1abeecf7991c5 Jan 26 10:53:47 crc kubenswrapper[5003]: I0126 10:53:47.777492 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q" event={"ID":"57d5e63e-8e5c-4a9f-ac8b-175ca05409ba","Type":"ContainerStarted","Data":"f5bfdf22d4ed36e907861c803e5c2d0430c4c03b925eb1aa3fb1abeecf7991c5"} Jan 26 10:53:48 crc kubenswrapper[5003]: I0126 10:53:48.784856 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q" event={"ID":"57d5e63e-8e5c-4a9f-ac8b-175ca05409ba","Type":"ContainerStarted","Data":"a88038aab46cdeb3cab6c2150248229f55df6e571ecb3e0e8584452e140927c1"} Jan 26 10:53:49 crc kubenswrapper[5003]: I0126 10:53:49.790357 5003 generic.go:334] "Generic (PLEG): container finished" podID="57d5e63e-8e5c-4a9f-ac8b-175ca05409ba" containerID="a88038aab46cdeb3cab6c2150248229f55df6e571ecb3e0e8584452e140927c1" exitCode=0 Jan 26 10:53:49 crc kubenswrapper[5003]: I0126 10:53:49.790532 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q" event={"ID":"57d5e63e-8e5c-4a9f-ac8b-175ca05409ba","Type":"ContainerDied","Data":"a88038aab46cdeb3cab6c2150248229f55df6e571ecb3e0e8584452e140927c1"} Jan 26 10:53:49 crc kubenswrapper[5003]: I0126 10:53:49.792680 5003 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 26 10:53:54 crc kubenswrapper[5003]: I0126 10:53:54.818164 5003 generic.go:334] "Generic (PLEG): container finished" podID="57d5e63e-8e5c-4a9f-ac8b-175ca05409ba" containerID="9556c9c57b58b75d2d2f673946ba9c7022cd8e641268e93e5610bff33350e7ec" exitCode=0 Jan 26 10:53:54 crc kubenswrapper[5003]: I0126 10:53:54.818346 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q" event={"ID":"57d5e63e-8e5c-4a9f-ac8b-175ca05409ba","Type":"ContainerDied","Data":"9556c9c57b58b75d2d2f673946ba9c7022cd8e641268e93e5610bff33350e7ec"} Jan 26 10:53:55 crc kubenswrapper[5003]: I0126 10:53:55.825675 5003 generic.go:334] "Generic (PLEG): container finished" podID="57d5e63e-8e5c-4a9f-ac8b-175ca05409ba" containerID="9b36fb448ed841bae7166085b1a074a738108012369763e87117c81d70464323" exitCode=0 Jan 26 10:53:55 crc kubenswrapper[5003]: I0126 10:53:55.825830 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q" 
event={"ID":"57d5e63e-8e5c-4a9f-ac8b-175ca05409ba","Type":"ContainerDied","Data":"9b36fb448ed841bae7166085b1a074a738108012369763e87117c81d70464323"} Jan 26 10:53:57 crc kubenswrapper[5003]: I0126 10:53:57.041377 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q" Jan 26 10:53:57 crc kubenswrapper[5003]: I0126 10:53:57.163531 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jfhfp\" (UniqueName: \"kubernetes.io/projected/57d5e63e-8e5c-4a9f-ac8b-175ca05409ba-kube-api-access-jfhfp\") pod \"57d5e63e-8e5c-4a9f-ac8b-175ca05409ba\" (UID: \"57d5e63e-8e5c-4a9f-ac8b-175ca05409ba\") " Jan 26 10:53:57 crc kubenswrapper[5003]: I0126 10:53:57.163699 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/57d5e63e-8e5c-4a9f-ac8b-175ca05409ba-bundle\") pod \"57d5e63e-8e5c-4a9f-ac8b-175ca05409ba\" (UID: \"57d5e63e-8e5c-4a9f-ac8b-175ca05409ba\") " Jan 26 10:53:57 crc kubenswrapper[5003]: I0126 10:53:57.163788 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/57d5e63e-8e5c-4a9f-ac8b-175ca05409ba-util\") pod \"57d5e63e-8e5c-4a9f-ac8b-175ca05409ba\" (UID: \"57d5e63e-8e5c-4a9f-ac8b-175ca05409ba\") " Jan 26 10:53:57 crc kubenswrapper[5003]: I0126 10:53:57.165120 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57d5e63e-8e5c-4a9f-ac8b-175ca05409ba-bundle" (OuterVolumeSpecName: "bundle") pod "57d5e63e-8e5c-4a9f-ac8b-175ca05409ba" (UID: "57d5e63e-8e5c-4a9f-ac8b-175ca05409ba"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:53:57 crc kubenswrapper[5003]: I0126 10:53:57.175120 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57d5e63e-8e5c-4a9f-ac8b-175ca05409ba-util" (OuterVolumeSpecName: "util") pod "57d5e63e-8e5c-4a9f-ac8b-175ca05409ba" (UID: "57d5e63e-8e5c-4a9f-ac8b-175ca05409ba"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:53:57 crc kubenswrapper[5003]: I0126 10:53:57.175706 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57d5e63e-8e5c-4a9f-ac8b-175ca05409ba-kube-api-access-jfhfp" (OuterVolumeSpecName: "kube-api-access-jfhfp") pod "57d5e63e-8e5c-4a9f-ac8b-175ca05409ba" (UID: "57d5e63e-8e5c-4a9f-ac8b-175ca05409ba"). InnerVolumeSpecName "kube-api-access-jfhfp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:53:57 crc kubenswrapper[5003]: I0126 10:53:57.266078 5003 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/57d5e63e-8e5c-4a9f-ac8b-175ca05409ba-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 10:53:57 crc kubenswrapper[5003]: I0126 10:53:57.266143 5003 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/57d5e63e-8e5c-4a9f-ac8b-175ca05409ba-util\") on node \"crc\" DevicePath \"\"" Jan 26 10:53:57 crc kubenswrapper[5003]: I0126 10:53:57.266164 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jfhfp\" (UniqueName: \"kubernetes.io/projected/57d5e63e-8e5c-4a9f-ac8b-175ca05409ba-kube-api-access-jfhfp\") on node \"crc\" DevicePath \"\"" Jan 26 10:53:57 crc kubenswrapper[5003]: I0126 10:53:57.838310 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q" event={"ID":"57d5e63e-8e5c-4a9f-ac8b-175ca05409ba","Type":"ContainerDied","Data":"f5bfdf22d4ed36e907861c803e5c2d0430c4c03b925eb1aa3fb1abeecf7991c5"} Jan 26 10:53:57 crc kubenswrapper[5003]: I0126 10:53:57.838351 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f5bfdf22d4ed36e907861c803e5c2d0430c4c03b925eb1aa3fb1abeecf7991c5" Jan 26 10:53:57 crc kubenswrapper[5003]: I0126 10:53:57.838389 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q" Jan 26 10:54:05 crc kubenswrapper[5003]: I0126 10:54:05.227127 5003 scope.go:117] "RemoveContainer" containerID="385aa2f7573bd1359a2745d96b174f9df281bd6c5d661a0dfb2e77084cfa011f" Jan 26 10:54:05 crc kubenswrapper[5003]: I0126 10:54:05.882866 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vpb6l_9a2a5d08-c449-45c6-8e1f-340c076422db/kube-multus/2.log" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.353464 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-7dbc7b9d98-qtfvg"] Jan 26 10:54:07 crc kubenswrapper[5003]: E0126 10:54:07.353980 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57d5e63e-8e5c-4a9f-ac8b-175ca05409ba" containerName="extract" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.353996 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="57d5e63e-8e5c-4a9f-ac8b-175ca05409ba" containerName="extract" Jan 26 10:54:07 crc kubenswrapper[5003]: E0126 10:54:07.354014 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57d5e63e-8e5c-4a9f-ac8b-175ca05409ba" containerName="pull" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.354022 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="57d5e63e-8e5c-4a9f-ac8b-175ca05409ba" containerName="pull" Jan 26 10:54:07 crc kubenswrapper[5003]: E0126 10:54:07.354046 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57d5e63e-8e5c-4a9f-ac8b-175ca05409ba" containerName="util" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.354057 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="57d5e63e-8e5c-4a9f-ac8b-175ca05409ba" containerName="util" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.354199 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="57d5e63e-8e5c-4a9f-ac8b-175ca05409ba" 
containerName="extract" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.354683 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-7dbc7b9d98-qtfvg" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.356528 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-87xjt" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.356737 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.357112 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.357864 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.359037 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.373669 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-7dbc7b9d98-qtfvg"] Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.399225 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cbdc93bb-9371-4da1-843d-e5ec38ca21fd-apiservice-cert\") pod \"metallb-operator-controller-manager-7dbc7b9d98-qtfvg\" (UID: \"cbdc93bb-9371-4da1-843d-e5ec38ca21fd\") " pod="metallb-system/metallb-operator-controller-manager-7dbc7b9d98-qtfvg" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.399402 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wsxcr\" (UniqueName: \"kubernetes.io/projected/cbdc93bb-9371-4da1-843d-e5ec38ca21fd-kube-api-access-wsxcr\") pod \"metallb-operator-controller-manager-7dbc7b9d98-qtfvg\" (UID: \"cbdc93bb-9371-4da1-843d-e5ec38ca21fd\") " pod="metallb-system/metallb-operator-controller-manager-7dbc7b9d98-qtfvg" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.399469 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cbdc93bb-9371-4da1-843d-e5ec38ca21fd-webhook-cert\") pod \"metallb-operator-controller-manager-7dbc7b9d98-qtfvg\" (UID: \"cbdc93bb-9371-4da1-843d-e5ec38ca21fd\") " pod="metallb-system/metallb-operator-controller-manager-7dbc7b9d98-qtfvg" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.500714 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cbdc93bb-9371-4da1-843d-e5ec38ca21fd-apiservice-cert\") pod \"metallb-operator-controller-manager-7dbc7b9d98-qtfvg\" (UID: \"cbdc93bb-9371-4da1-843d-e5ec38ca21fd\") " pod="metallb-system/metallb-operator-controller-manager-7dbc7b9d98-qtfvg" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.500802 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wsxcr\" (UniqueName: \"kubernetes.io/projected/cbdc93bb-9371-4da1-843d-e5ec38ca21fd-kube-api-access-wsxcr\") pod \"metallb-operator-controller-manager-7dbc7b9d98-qtfvg\" (UID: \"cbdc93bb-9371-4da1-843d-e5ec38ca21fd\") " 
pod="metallb-system/metallb-operator-controller-manager-7dbc7b9d98-qtfvg" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.500837 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cbdc93bb-9371-4da1-843d-e5ec38ca21fd-webhook-cert\") pod \"metallb-operator-controller-manager-7dbc7b9d98-qtfvg\" (UID: \"cbdc93bb-9371-4da1-843d-e5ec38ca21fd\") " pod="metallb-system/metallb-operator-controller-manager-7dbc7b9d98-qtfvg" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.509041 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cbdc93bb-9371-4da1-843d-e5ec38ca21fd-webhook-cert\") pod \"metallb-operator-controller-manager-7dbc7b9d98-qtfvg\" (UID: \"cbdc93bb-9371-4da1-843d-e5ec38ca21fd\") " pod="metallb-system/metallb-operator-controller-manager-7dbc7b9d98-qtfvg" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.515186 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cbdc93bb-9371-4da1-843d-e5ec38ca21fd-apiservice-cert\") pod \"metallb-operator-controller-manager-7dbc7b9d98-qtfvg\" (UID: \"cbdc93bb-9371-4da1-843d-e5ec38ca21fd\") " pod="metallb-system/metallb-operator-controller-manager-7dbc7b9d98-qtfvg" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.517412 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wsxcr\" (UniqueName: \"kubernetes.io/projected/cbdc93bb-9371-4da1-843d-e5ec38ca21fd-kube-api-access-wsxcr\") pod \"metallb-operator-controller-manager-7dbc7b9d98-qtfvg\" (UID: \"cbdc93bb-9371-4da1-843d-e5ec38ca21fd\") " pod="metallb-system/metallb-operator-controller-manager-7dbc7b9d98-qtfvg" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.575569 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-6ff7ff8d4c-slczq"] Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.576393 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6ff7ff8d4c-slczq" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.579210 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.579211 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.579527 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-bd6jk" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.596089 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6ff7ff8d4c-slczq"] Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.602401 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9sp8k\" (UniqueName: \"kubernetes.io/projected/69d7a94b-db77-4b43-8fce-9ab7a114d24c-kube-api-access-9sp8k\") pod \"metallb-operator-webhook-server-6ff7ff8d4c-slczq\" (UID: \"69d7a94b-db77-4b43-8fce-9ab7a114d24c\") " pod="metallb-system/metallb-operator-webhook-server-6ff7ff8d4c-slczq" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.602513 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/69d7a94b-db77-4b43-8fce-9ab7a114d24c-apiservice-cert\") pod \"metallb-operator-webhook-server-6ff7ff8d4c-slczq\" (UID: \"69d7a94b-db77-4b43-8fce-9ab7a114d24c\") " pod="metallb-system/metallb-operator-webhook-server-6ff7ff8d4c-slczq" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.602550 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/69d7a94b-db77-4b43-8fce-9ab7a114d24c-webhook-cert\") pod \"metallb-operator-webhook-server-6ff7ff8d4c-slczq\" (UID: \"69d7a94b-db77-4b43-8fce-9ab7a114d24c\") " pod="metallb-system/metallb-operator-webhook-server-6ff7ff8d4c-slczq" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.667367 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-7dbc7b9d98-qtfvg" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.704230 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/69d7a94b-db77-4b43-8fce-9ab7a114d24c-apiservice-cert\") pod \"metallb-operator-webhook-server-6ff7ff8d4c-slczq\" (UID: \"69d7a94b-db77-4b43-8fce-9ab7a114d24c\") " pod="metallb-system/metallb-operator-webhook-server-6ff7ff8d4c-slczq" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.704313 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/69d7a94b-db77-4b43-8fce-9ab7a114d24c-webhook-cert\") pod \"metallb-operator-webhook-server-6ff7ff8d4c-slczq\" (UID: \"69d7a94b-db77-4b43-8fce-9ab7a114d24c\") " pod="metallb-system/metallb-operator-webhook-server-6ff7ff8d4c-slczq" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.704355 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9sp8k\" (UniqueName: \"kubernetes.io/projected/69d7a94b-db77-4b43-8fce-9ab7a114d24c-kube-api-access-9sp8k\") pod \"metallb-operator-webhook-server-6ff7ff8d4c-slczq\" (UID: \"69d7a94b-db77-4b43-8fce-9ab7a114d24c\") " pod="metallb-system/metallb-operator-webhook-server-6ff7ff8d4c-slczq" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.711020 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/69d7a94b-db77-4b43-8fce-9ab7a114d24c-webhook-cert\") pod \"metallb-operator-webhook-server-6ff7ff8d4c-slczq\" (UID: \"69d7a94b-db77-4b43-8fce-9ab7a114d24c\") " pod="metallb-system/metallb-operator-webhook-server-6ff7ff8d4c-slczq" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.711142 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/69d7a94b-db77-4b43-8fce-9ab7a114d24c-apiservice-cert\") pod \"metallb-operator-webhook-server-6ff7ff8d4c-slczq\" (UID: \"69d7a94b-db77-4b43-8fce-9ab7a114d24c\") " pod="metallb-system/metallb-operator-webhook-server-6ff7ff8d4c-slczq" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.724115 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9sp8k\" (UniqueName: \"kubernetes.io/projected/69d7a94b-db77-4b43-8fce-9ab7a114d24c-kube-api-access-9sp8k\") pod \"metallb-operator-webhook-server-6ff7ff8d4c-slczq\" (UID: \"69d7a94b-db77-4b43-8fce-9ab7a114d24c\") " pod="metallb-system/metallb-operator-webhook-server-6ff7ff8d4c-slczq" Jan 26 10:54:07 crc kubenswrapper[5003]: I0126 10:54:07.896534 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6ff7ff8d4c-slczq" Jan 26 10:54:08 crc kubenswrapper[5003]: I0126 10:54:08.079308 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6ff7ff8d4c-slczq"] Jan 26 10:54:08 crc kubenswrapper[5003]: I0126 10:54:08.088378 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-7dbc7b9d98-qtfvg"] Jan 26 10:54:08 crc kubenswrapper[5003]: W0126 10:54:08.102816 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcbdc93bb_9371_4da1_843d_e5ec38ca21fd.slice/crio-04359bcf95b278375161d0f02d12e95765de20cea21f0b14942605d9b4892bb5 WatchSource:0}: Error finding container 04359bcf95b278375161d0f02d12e95765de20cea21f0b14942605d9b4892bb5: Status 404 returned error can't find the container with id 04359bcf95b278375161d0f02d12e95765de20cea21f0b14942605d9b4892bb5 Jan 26 10:54:08 crc kubenswrapper[5003]: I0126 10:54:08.896925 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7dbc7b9d98-qtfvg" event={"ID":"cbdc93bb-9371-4da1-843d-e5ec38ca21fd","Type":"ContainerStarted","Data":"04359bcf95b278375161d0f02d12e95765de20cea21f0b14942605d9b4892bb5"} Jan 26 10:54:08 crc kubenswrapper[5003]: I0126 10:54:08.898212 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6ff7ff8d4c-slczq" event={"ID":"69d7a94b-db77-4b43-8fce-9ab7a114d24c","Type":"ContainerStarted","Data":"c3650069a6652648ed55bff5302cb0a6d69bd507f2b198c7cffe7d1ead15693a"} Jan 26 10:54:13 crc kubenswrapper[5003]: I0126 10:54:13.925307 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7dbc7b9d98-qtfvg" event={"ID":"cbdc93bb-9371-4da1-843d-e5ec38ca21fd","Type":"ContainerStarted","Data":"27143e0e8835cd4be9caf0c23cde292fd45fbb12772148a3b6ccae5a43e4b415"} Jan 26 10:54:13 crc kubenswrapper[5003]: I0126 10:54:13.925863 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-7dbc7b9d98-qtfvg" Jan 26 10:54:13 crc kubenswrapper[5003]: I0126 10:54:13.927430 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6ff7ff8d4c-slczq" event={"ID":"69d7a94b-db77-4b43-8fce-9ab7a114d24c","Type":"ContainerStarted","Data":"5eadcf08fe9ae0003f0d065c89b8d23038ad94508120758973d23eae3a1ad26a"} Jan 26 10:54:13 crc kubenswrapper[5003]: I0126 10:54:13.927581 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-6ff7ff8d4c-slczq" Jan 26 10:54:13 crc kubenswrapper[5003]: I0126 10:54:13.959168 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-7dbc7b9d98-qtfvg" podStartSLOduration=2.159520366 podStartE2EDuration="6.959150137s" podCreationTimestamp="2026-01-26 10:54:07 +0000 UTC" firstStartedPulling="2026-01-26 10:54:08.106185995 +0000 UTC m=+663.647411556" lastFinishedPulling="2026-01-26 10:54:12.905815776 +0000 UTC m=+668.447041327" observedRunningTime="2026-01-26 10:54:13.952333419 +0000 UTC m=+669.493559000" watchObservedRunningTime="2026-01-26 10:54:13.959150137 +0000 UTC m=+669.500375718" Jan 26 10:54:13 crc kubenswrapper[5003]: I0126 10:54:13.977147 5003 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-6ff7ff8d4c-slczq" podStartSLOduration=2.147865507 podStartE2EDuration="6.97713015s" podCreationTimestamp="2026-01-26 10:54:07 +0000 UTC" firstStartedPulling="2026-01-26 10:54:08.093933578 +0000 UTC m=+663.635159139" lastFinishedPulling="2026-01-26 10:54:12.923198221 +0000 UTC m=+668.464423782" observedRunningTime="2026-01-26 10:54:13.974636288 +0000 UTC m=+669.515861869" watchObservedRunningTime="2026-01-26 10:54:13.97713015 +0000 UTC m=+669.518355721" Jan 26 10:54:27 crc kubenswrapper[5003]: I0126 10:54:27.903646 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-6ff7ff8d4c-slczq" Jan 26 10:54:47 crc kubenswrapper[5003]: I0126 10:54:47.671101 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-7dbc7b9d98-qtfvg" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.452295 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-gplmk"] Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.455195 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-gplmk" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.458756 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.458842 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-bphmt" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.460912 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-75hzw"] Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.462202 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-75hzw" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.463585 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.463800 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.477940 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-75hzw"] Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.534758 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/8ffa174f-aed7-4f5b-9feb-62a45fc68d0b-frr-sockets\") pod \"frr-k8s-gplmk\" (UID: \"8ffa174f-aed7-4f5b-9feb-62a45fc68d0b\") " pod="metallb-system/frr-k8s-gplmk" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.534802 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/8ffa174f-aed7-4f5b-9feb-62a45fc68d0b-reloader\") pod \"frr-k8s-gplmk\" (UID: \"8ffa174f-aed7-4f5b-9feb-62a45fc68d0b\") " pod="metallb-system/frr-k8s-gplmk" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.534829 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8ffa174f-aed7-4f5b-9feb-62a45fc68d0b-metrics-certs\") pod \"frr-k8s-gplmk\" (UID: \"8ffa174f-aed7-4f5b-9feb-62a45fc68d0b\") " pod="metallb-system/frr-k8s-gplmk" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.534851 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/8ffa174f-aed7-4f5b-9feb-62a45fc68d0b-metrics\") pod \"frr-k8s-gplmk\" (UID: \"8ffa174f-aed7-4f5b-9feb-62a45fc68d0b\") " pod="metallb-system/frr-k8s-gplmk" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.534892 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b36ce522-21fc-49c5-a4ff-6a6680060a85-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-75hzw\" (UID: \"b36ce522-21fc-49c5-a4ff-6a6680060a85\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-75hzw" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.534921 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4q2gz\" (UniqueName: \"kubernetes.io/projected/b36ce522-21fc-49c5-a4ff-6a6680060a85-kube-api-access-4q2gz\") pod \"frr-k8s-webhook-server-7df86c4f6c-75hzw\" (UID: \"b36ce522-21fc-49c5-a4ff-6a6680060a85\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-75hzw" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.534957 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/8ffa174f-aed7-4f5b-9feb-62a45fc68d0b-frr-conf\") pod \"frr-k8s-gplmk\" (UID: \"8ffa174f-aed7-4f5b-9feb-62a45fc68d0b\") " pod="metallb-system/frr-k8s-gplmk" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.534995 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: 
\"kubernetes.io/configmap/8ffa174f-aed7-4f5b-9feb-62a45fc68d0b-frr-startup\") pod \"frr-k8s-gplmk\" (UID: \"8ffa174f-aed7-4f5b-9feb-62a45fc68d0b\") " pod="metallb-system/frr-k8s-gplmk" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.535017 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8bg6\" (UniqueName: \"kubernetes.io/projected/8ffa174f-aed7-4f5b-9feb-62a45fc68d0b-kube-api-access-c8bg6\") pod \"frr-k8s-gplmk\" (UID: \"8ffa174f-aed7-4f5b-9feb-62a45fc68d0b\") " pod="metallb-system/frr-k8s-gplmk" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.552683 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-bfmvk"] Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.553674 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-bfmvk" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.558968 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.560006 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.560014 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-5c6c6" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.561045 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.575531 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-6968d8fdc4-m7jxn"] Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.576589 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-6968d8fdc4-m7jxn" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.578075 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.597818 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-m7jxn"] Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.636124 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4q2gz\" (UniqueName: \"kubernetes.io/projected/b36ce522-21fc-49c5-a4ff-6a6680060a85-kube-api-access-4q2gz\") pod \"frr-k8s-webhook-server-7df86c4f6c-75hzw\" (UID: \"b36ce522-21fc-49c5-a4ff-6a6680060a85\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-75hzw" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.636193 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0f4f27a6-9cd9-4b96-90d2-dd695d64362c-cert\") pod \"controller-6968d8fdc4-m7jxn\" (UID: \"0f4f27a6-9cd9-4b96-90d2-dd695d64362c\") " pod="metallb-system/controller-6968d8fdc4-m7jxn" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.636220 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/104f69d2-ed9b-4607-82f2-649871e3c881-metallb-excludel2\") pod \"speaker-bfmvk\" (UID: \"104f69d2-ed9b-4607-82f2-649871e3c881\") " pod="metallb-system/speaker-bfmvk" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.636247 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2nhn\" (UniqueName: \"kubernetes.io/projected/104f69d2-ed9b-4607-82f2-649871e3c881-kube-api-access-q2nhn\") pod \"speaker-bfmvk\" (UID: \"104f69d2-ed9b-4607-82f2-649871e3c881\") " pod="metallb-system/speaker-bfmvk" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.636293 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/8ffa174f-aed7-4f5b-9feb-62a45fc68d0b-frr-conf\") pod \"frr-k8s-gplmk\" (UID: \"8ffa174f-aed7-4f5b-9feb-62a45fc68d0b\") " pod="metallb-system/frr-k8s-gplmk" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.636334 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/8ffa174f-aed7-4f5b-9feb-62a45fc68d0b-frr-startup\") pod \"frr-k8s-gplmk\" (UID: \"8ffa174f-aed7-4f5b-9feb-62a45fc68d0b\") " pod="metallb-system/frr-k8s-gplmk" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.636355 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/104f69d2-ed9b-4607-82f2-649871e3c881-metrics-certs\") pod \"speaker-bfmvk\" (UID: \"104f69d2-ed9b-4607-82f2-649871e3c881\") " pod="metallb-system/speaker-bfmvk" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.636379 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8bg6\" (UniqueName: \"kubernetes.io/projected/8ffa174f-aed7-4f5b-9feb-62a45fc68d0b-kube-api-access-c8bg6\") pod \"frr-k8s-gplmk\" (UID: \"8ffa174f-aed7-4f5b-9feb-62a45fc68d0b\") " pod="metallb-system/frr-k8s-gplmk" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.636406 5003 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0f4f27a6-9cd9-4b96-90d2-dd695d64362c-metrics-certs\") pod \"controller-6968d8fdc4-m7jxn\" (UID: \"0f4f27a6-9cd9-4b96-90d2-dd695d64362c\") " pod="metallb-system/controller-6968d8fdc4-m7jxn" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.636445 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/8ffa174f-aed7-4f5b-9feb-62a45fc68d0b-frr-sockets\") pod \"frr-k8s-gplmk\" (UID: \"8ffa174f-aed7-4f5b-9feb-62a45fc68d0b\") " pod="metallb-system/frr-k8s-gplmk" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.636472 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/8ffa174f-aed7-4f5b-9feb-62a45fc68d0b-reloader\") pod \"frr-k8s-gplmk\" (UID: \"8ffa174f-aed7-4f5b-9feb-62a45fc68d0b\") " pod="metallb-system/frr-k8s-gplmk" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.636498 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8ffa174f-aed7-4f5b-9feb-62a45fc68d0b-metrics-certs\") pod \"frr-k8s-gplmk\" (UID: \"8ffa174f-aed7-4f5b-9feb-62a45fc68d0b\") " pod="metallb-system/frr-k8s-gplmk" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.636518 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/8ffa174f-aed7-4f5b-9feb-62a45fc68d0b-metrics\") pod \"frr-k8s-gplmk\" (UID: \"8ffa174f-aed7-4f5b-9feb-62a45fc68d0b\") " pod="metallb-system/frr-k8s-gplmk" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.636541 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/104f69d2-ed9b-4607-82f2-649871e3c881-memberlist\") pod \"speaker-bfmvk\" (UID: \"104f69d2-ed9b-4607-82f2-649871e3c881\") " pod="metallb-system/speaker-bfmvk" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.636571 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b36ce522-21fc-49c5-a4ff-6a6680060a85-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-75hzw\" (UID: \"b36ce522-21fc-49c5-a4ff-6a6680060a85\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-75hzw" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.636587 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9sm7\" (UniqueName: \"kubernetes.io/projected/0f4f27a6-9cd9-4b96-90d2-dd695d64362c-kube-api-access-f9sm7\") pod \"controller-6968d8fdc4-m7jxn\" (UID: \"0f4f27a6-9cd9-4b96-90d2-dd695d64362c\") " pod="metallb-system/controller-6968d8fdc4-m7jxn" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.637267 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/8ffa174f-aed7-4f5b-9feb-62a45fc68d0b-frr-sockets\") pod \"frr-k8s-gplmk\" (UID: \"8ffa174f-aed7-4f5b-9feb-62a45fc68d0b\") " pod="metallb-system/frr-k8s-gplmk" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.637320 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/8ffa174f-aed7-4f5b-9feb-62a45fc68d0b-frr-startup\") pod 
\"frr-k8s-gplmk\" (UID: \"8ffa174f-aed7-4f5b-9feb-62a45fc68d0b\") " pod="metallb-system/frr-k8s-gplmk" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.637474 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/8ffa174f-aed7-4f5b-9feb-62a45fc68d0b-reloader\") pod \"frr-k8s-gplmk\" (UID: \"8ffa174f-aed7-4f5b-9feb-62a45fc68d0b\") " pod="metallb-system/frr-k8s-gplmk" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.637538 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/8ffa174f-aed7-4f5b-9feb-62a45fc68d0b-frr-conf\") pod \"frr-k8s-gplmk\" (UID: \"8ffa174f-aed7-4f5b-9feb-62a45fc68d0b\") " pod="metallb-system/frr-k8s-gplmk" Jan 26 10:54:48 crc kubenswrapper[5003]: E0126 10:54:48.637545 5003 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Jan 26 10:54:48 crc kubenswrapper[5003]: E0126 10:54:48.637584 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8ffa174f-aed7-4f5b-9feb-62a45fc68d0b-metrics-certs podName:8ffa174f-aed7-4f5b-9feb-62a45fc68d0b nodeName:}" failed. No retries permitted until 2026-01-26 10:54:49.137569201 +0000 UTC m=+704.678794762 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8ffa174f-aed7-4f5b-9feb-62a45fc68d0b-metrics-certs") pod "frr-k8s-gplmk" (UID: "8ffa174f-aed7-4f5b-9feb-62a45fc68d0b") : secret "frr-k8s-certs-secret" not found Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.637718 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/8ffa174f-aed7-4f5b-9feb-62a45fc68d0b-metrics\") pod \"frr-k8s-gplmk\" (UID: \"8ffa174f-aed7-4f5b-9feb-62a45fc68d0b\") " pod="metallb-system/frr-k8s-gplmk" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.645063 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b36ce522-21fc-49c5-a4ff-6a6680060a85-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-75hzw\" (UID: \"b36ce522-21fc-49c5-a4ff-6a6680060a85\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-75hzw" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.654787 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4q2gz\" (UniqueName: \"kubernetes.io/projected/b36ce522-21fc-49c5-a4ff-6a6680060a85-kube-api-access-4q2gz\") pod \"frr-k8s-webhook-server-7df86c4f6c-75hzw\" (UID: \"b36ce522-21fc-49c5-a4ff-6a6680060a85\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-75hzw" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.654910 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8bg6\" (UniqueName: \"kubernetes.io/projected/8ffa174f-aed7-4f5b-9feb-62a45fc68d0b-kube-api-access-c8bg6\") pod \"frr-k8s-gplmk\" (UID: \"8ffa174f-aed7-4f5b-9feb-62a45fc68d0b\") " pod="metallb-system/frr-k8s-gplmk" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.737855 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/104f69d2-ed9b-4607-82f2-649871e3c881-memberlist\") pod \"speaker-bfmvk\" (UID: \"104f69d2-ed9b-4607-82f2-649871e3c881\") " pod="metallb-system/speaker-bfmvk" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.737911 5003 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9sm7\" (UniqueName: \"kubernetes.io/projected/0f4f27a6-9cd9-4b96-90d2-dd695d64362c-kube-api-access-f9sm7\") pod \"controller-6968d8fdc4-m7jxn\" (UID: \"0f4f27a6-9cd9-4b96-90d2-dd695d64362c\") " pod="metallb-system/controller-6968d8fdc4-m7jxn" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.737936 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0f4f27a6-9cd9-4b96-90d2-dd695d64362c-cert\") pod \"controller-6968d8fdc4-m7jxn\" (UID: \"0f4f27a6-9cd9-4b96-90d2-dd695d64362c\") " pod="metallb-system/controller-6968d8fdc4-m7jxn" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.737950 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/104f69d2-ed9b-4607-82f2-649871e3c881-metallb-excludel2\") pod \"speaker-bfmvk\" (UID: \"104f69d2-ed9b-4607-82f2-649871e3c881\") " pod="metallb-system/speaker-bfmvk" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.737971 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2nhn\" (UniqueName: \"kubernetes.io/projected/104f69d2-ed9b-4607-82f2-649871e3c881-kube-api-access-q2nhn\") pod \"speaker-bfmvk\" (UID: \"104f69d2-ed9b-4607-82f2-649871e3c881\") " pod="metallb-system/speaker-bfmvk" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.738006 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/104f69d2-ed9b-4607-82f2-649871e3c881-metrics-certs\") pod \"speaker-bfmvk\" (UID: \"104f69d2-ed9b-4607-82f2-649871e3c881\") " pod="metallb-system/speaker-bfmvk" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.738028 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0f4f27a6-9cd9-4b96-90d2-dd695d64362c-metrics-certs\") pod \"controller-6968d8fdc4-m7jxn\" (UID: \"0f4f27a6-9cd9-4b96-90d2-dd695d64362c\") " pod="metallb-system/controller-6968d8fdc4-m7jxn" Jan 26 10:54:48 crc kubenswrapper[5003]: E0126 10:54:48.738036 5003 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Jan 26 10:54:48 crc kubenswrapper[5003]: E0126 10:54:48.738101 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/104f69d2-ed9b-4607-82f2-649871e3c881-memberlist podName:104f69d2-ed9b-4607-82f2-649871e3c881 nodeName:}" failed. No retries permitted until 2026-01-26 10:54:49.238084407 +0000 UTC m=+704.779309968 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/104f69d2-ed9b-4607-82f2-649871e3c881-memberlist") pod "speaker-bfmvk" (UID: "104f69d2-ed9b-4607-82f2-649871e3c881") : secret "metallb-memberlist" not found Jan 26 10:54:48 crc kubenswrapper[5003]: E0126 10:54:48.738135 5003 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found Jan 26 10:54:48 crc kubenswrapper[5003]: E0126 10:54:48.738182 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0f4f27a6-9cd9-4b96-90d2-dd695d64362c-metrics-certs podName:0f4f27a6-9cd9-4b96-90d2-dd695d64362c nodeName:}" failed. 
No retries permitted until 2026-01-26 10:54:49.238166569 +0000 UTC m=+704.779392130 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0f4f27a6-9cd9-4b96-90d2-dd695d64362c-metrics-certs") pod "controller-6968d8fdc4-m7jxn" (UID: "0f4f27a6-9cd9-4b96-90d2-dd695d64362c") : secret "controller-certs-secret" not found Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.738864 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/104f69d2-ed9b-4607-82f2-649871e3c881-metallb-excludel2\") pod \"speaker-bfmvk\" (UID: \"104f69d2-ed9b-4607-82f2-649871e3c881\") " pod="metallb-system/speaker-bfmvk" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.741557 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0f4f27a6-9cd9-4b96-90d2-dd695d64362c-cert\") pod \"controller-6968d8fdc4-m7jxn\" (UID: \"0f4f27a6-9cd9-4b96-90d2-dd695d64362c\") " pod="metallb-system/controller-6968d8fdc4-m7jxn" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.749759 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/104f69d2-ed9b-4607-82f2-649871e3c881-metrics-certs\") pod \"speaker-bfmvk\" (UID: \"104f69d2-ed9b-4607-82f2-649871e3c881\") " pod="metallb-system/speaker-bfmvk" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.753820 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2nhn\" (UniqueName: \"kubernetes.io/projected/104f69d2-ed9b-4607-82f2-649871e3c881-kube-api-access-q2nhn\") pod \"speaker-bfmvk\" (UID: \"104f69d2-ed9b-4607-82f2-649871e3c881\") " pod="metallb-system/speaker-bfmvk" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.754258 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9sm7\" (UniqueName: \"kubernetes.io/projected/0f4f27a6-9cd9-4b96-90d2-dd695d64362c-kube-api-access-f9sm7\") pod \"controller-6968d8fdc4-m7jxn\" (UID: \"0f4f27a6-9cd9-4b96-90d2-dd695d64362c\") " pod="metallb-system/controller-6968d8fdc4-m7jxn" Jan 26 10:54:48 crc kubenswrapper[5003]: I0126 10:54:48.788369 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-75hzw" Jan 26 10:54:49 crc kubenswrapper[5003]: I0126 10:54:49.142781 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8ffa174f-aed7-4f5b-9feb-62a45fc68d0b-metrics-certs\") pod \"frr-k8s-gplmk\" (UID: \"8ffa174f-aed7-4f5b-9feb-62a45fc68d0b\") " pod="metallb-system/frr-k8s-gplmk" Jan 26 10:54:49 crc kubenswrapper[5003]: I0126 10:54:49.149869 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8ffa174f-aed7-4f5b-9feb-62a45fc68d0b-metrics-certs\") pod \"frr-k8s-gplmk\" (UID: \"8ffa174f-aed7-4f5b-9feb-62a45fc68d0b\") " pod="metallb-system/frr-k8s-gplmk" Jan 26 10:54:49 crc kubenswrapper[5003]: I0126 10:54:49.244021 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0f4f27a6-9cd9-4b96-90d2-dd695d64362c-metrics-certs\") pod \"controller-6968d8fdc4-m7jxn\" (UID: \"0f4f27a6-9cd9-4b96-90d2-dd695d64362c\") " pod="metallb-system/controller-6968d8fdc4-m7jxn" Jan 26 10:54:49 crc kubenswrapper[5003]: I0126 10:54:49.244098 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/104f69d2-ed9b-4607-82f2-649871e3c881-memberlist\") pod \"speaker-bfmvk\" (UID: \"104f69d2-ed9b-4607-82f2-649871e3c881\") " pod="metallb-system/speaker-bfmvk" Jan 26 10:54:49 crc kubenswrapper[5003]: E0126 10:54:49.244229 5003 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Jan 26 10:54:49 crc kubenswrapper[5003]: E0126 10:54:49.244305 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/104f69d2-ed9b-4607-82f2-649871e3c881-memberlist podName:104f69d2-ed9b-4607-82f2-649871e3c881 nodeName:}" failed. No retries permitted until 2026-01-26 10:54:50.244275872 +0000 UTC m=+705.785501433 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/104f69d2-ed9b-4607-82f2-649871e3c881-memberlist") pod "speaker-bfmvk" (UID: "104f69d2-ed9b-4607-82f2-649871e3c881") : secret "metallb-memberlist" not found Jan 26 10:54:49 crc kubenswrapper[5003]: I0126 10:54:49.254142 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-75hzw"] Jan 26 10:54:49 crc kubenswrapper[5003]: I0126 10:54:49.254269 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0f4f27a6-9cd9-4b96-90d2-dd695d64362c-metrics-certs\") pod \"controller-6968d8fdc4-m7jxn\" (UID: \"0f4f27a6-9cd9-4b96-90d2-dd695d64362c\") " pod="metallb-system/controller-6968d8fdc4-m7jxn" Jan 26 10:54:49 crc kubenswrapper[5003]: W0126 10:54:49.261967 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb36ce522_21fc_49c5_a4ff_6a6680060a85.slice/crio-f4cff8cf05f32abdc289cb40b3ca137823553defd400ba6f9e24cccb4cbff10a WatchSource:0}: Error finding container f4cff8cf05f32abdc289cb40b3ca137823553defd400ba6f9e24cccb4cbff10a: Status 404 returned error can't find the container with id f4cff8cf05f32abdc289cb40b3ca137823553defd400ba6f9e24cccb4cbff10a Jan 26 10:54:49 crc kubenswrapper[5003]: I0126 10:54:49.375799 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-gplmk" Jan 26 10:54:49 crc kubenswrapper[5003]: I0126 10:54:49.491463 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6968d8fdc4-m7jxn" Jan 26 10:54:49 crc kubenswrapper[5003]: I0126 10:54:49.675165 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-m7jxn"] Jan 26 10:54:50 crc kubenswrapper[5003]: I0126 10:54:50.170722 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-m7jxn" event={"ID":"0f4f27a6-9cd9-4b96-90d2-dd695d64362c","Type":"ContainerStarted","Data":"e9ddd64f809c06af18a058cf6f774fce84a14747bc141c47afbecc6b21a49a75"} Jan 26 10:54:50 crc kubenswrapper[5003]: I0126 10:54:50.171765 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-75hzw" event={"ID":"b36ce522-21fc-49c5-a4ff-6a6680060a85","Type":"ContainerStarted","Data":"f4cff8cf05f32abdc289cb40b3ca137823553defd400ba6f9e24cccb4cbff10a"} Jan 26 10:54:50 crc kubenswrapper[5003]: I0126 10:54:50.259040 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/104f69d2-ed9b-4607-82f2-649871e3c881-memberlist\") pod \"speaker-bfmvk\" (UID: \"104f69d2-ed9b-4607-82f2-649871e3c881\") " pod="metallb-system/speaker-bfmvk" Jan 26 10:54:50 crc kubenswrapper[5003]: E0126 10:54:50.259355 5003 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Jan 26 10:54:50 crc kubenswrapper[5003]: E0126 10:54:50.259468 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/104f69d2-ed9b-4607-82f2-649871e3c881-memberlist podName:104f69d2-ed9b-4607-82f2-649871e3c881 nodeName:}" failed. No retries permitted until 2026-01-26 10:54:52.259440742 +0000 UTC m=+707.800666343 (durationBeforeRetry 2s). 
Jan 26 10:54:51 crc kubenswrapper[5003]: I0126 10:54:51.195627 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-m7jxn" event={"ID":"0f4f27a6-9cd9-4b96-90d2-dd695d64362c","Type":"ContainerStarted","Data":"486a351ef9699bc667c295f2dc321675f687ea2cf7552bd3bd6f0a9419d022d5"}
Jan 26 10:54:51 crc kubenswrapper[5003]: I0126 10:54:51.205968 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gplmk" event={"ID":"8ffa174f-aed7-4f5b-9feb-62a45fc68d0b","Type":"ContainerStarted","Data":"5073664f40514278766625b92f3495adc21b1411e959328bb93f343b98407181"}
Jan 26 10:54:52 crc kubenswrapper[5003]: I0126 10:54:52.299007 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/104f69d2-ed9b-4607-82f2-649871e3c881-memberlist\") pod \"speaker-bfmvk\" (UID: \"104f69d2-ed9b-4607-82f2-649871e3c881\") " pod="metallb-system/speaker-bfmvk"
Jan 26 10:54:52 crc kubenswrapper[5003]: I0126 10:54:52.311042 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/104f69d2-ed9b-4607-82f2-649871e3c881-memberlist\") pod \"speaker-bfmvk\" (UID: \"104f69d2-ed9b-4607-82f2-649871e3c881\") " pod="metallb-system/speaker-bfmvk"
Jan 26 10:54:52 crc kubenswrapper[5003]: I0126 10:54:52.469045 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-bfmvk"
Jan 26 10:54:53 crc kubenswrapper[5003]: I0126 10:54:53.217990 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-bfmvk" event={"ID":"104f69d2-ed9b-4607-82f2-649871e3c881","Type":"ContainerStarted","Data":"b891ea8276da1feb9e2562c0dc6c4d9958d2aa2bf5cdc7b045caf39c3c3dad19"}
Jan 26 10:54:53 crc kubenswrapper[5003]: I0126 10:54:53.218026 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-bfmvk" event={"ID":"104f69d2-ed9b-4607-82f2-649871e3c881","Type":"ContainerStarted","Data":"e00b124d49785b1d3c71a4ef9b79ec8442ee356982d90606314c10a3c55c03d7"}
Jan 26 10:54:54 crc kubenswrapper[5003]: I0126 10:54:54.233175 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-m7jxn" event={"ID":"0f4f27a6-9cd9-4b96-90d2-dd695d64362c","Type":"ContainerStarted","Data":"9e9a03301eccf99d400d9357e9223a00679bbf8102c7ef2c0a2292e81ea7468d"}
Jan 26 10:54:54 crc kubenswrapper[5003]: I0126 10:54:54.234215 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-6968d8fdc4-m7jxn"
Jan 26 10:54:55 crc kubenswrapper[5003]: I0126 10:54:55.036445 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-6968d8fdc4-m7jxn" podStartSLOduration=4.054514263 podStartE2EDuration="7.036420103s" podCreationTimestamp="2026-01-26 10:54:48 +0000 UTC" firstStartedPulling="2026-01-26 10:54:50.626799875 +0000 UTC m=+706.168025446" lastFinishedPulling="2026-01-26 10:54:53.608705725 +0000 UTC m=+709.149931286" observedRunningTime="2026-01-26 10:54:54.25302216 +0000 UTC m=+709.794247721" watchObservedRunningTime="2026-01-26 10:54:55.036420103 +0000 UTC m=+710.577645654"
Jan 26 10:54:55 crc kubenswrapper[5003]: I0126 10:54:55.244107 5003 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-bfmvk" event={"ID":"104f69d2-ed9b-4607-82f2-649871e3c881","Type":"ContainerStarted","Data":"43a5c4cfdebcbd4a983ba7b687f3ac9e43b94f42b9c5e909f31beb1cbd43a70c"} Jan 26 10:54:55 crc kubenswrapper[5003]: I0126 10:54:55.250499 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-bfmvk" Jan 26 10:54:55 crc kubenswrapper[5003]: I0126 10:54:55.269359 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-bfmvk" podStartSLOduration=6.069365833 podStartE2EDuration="7.269344093s" podCreationTimestamp="2026-01-26 10:54:48 +0000 UTC" firstStartedPulling="2026-01-26 10:54:52.874172443 +0000 UTC m=+708.415398004" lastFinishedPulling="2026-01-26 10:54:54.074150703 +0000 UTC m=+709.615376264" observedRunningTime="2026-01-26 10:54:55.266274934 +0000 UTC m=+710.807500485" watchObservedRunningTime="2026-01-26 10:54:55.269344093 +0000 UTC m=+710.810569654" Jan 26 10:54:58 crc kubenswrapper[5003]: I0126 10:54:58.269259 5003 generic.go:334] "Generic (PLEG): container finished" podID="8ffa174f-aed7-4f5b-9feb-62a45fc68d0b" containerID="aa1d900f3bfd3e9cd5ca1ff28f5448a29babba6d0149579d55a70e054ed99ccf" exitCode=0 Jan 26 10:54:58 crc kubenswrapper[5003]: I0126 10:54:58.269510 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gplmk" event={"ID":"8ffa174f-aed7-4f5b-9feb-62a45fc68d0b","Type":"ContainerDied","Data":"aa1d900f3bfd3e9cd5ca1ff28f5448a29babba6d0149579d55a70e054ed99ccf"} Jan 26 10:54:58 crc kubenswrapper[5003]: I0126 10:54:58.272267 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-75hzw" event={"ID":"b36ce522-21fc-49c5-a4ff-6a6680060a85","Type":"ContainerStarted","Data":"c0d2491cce8f97715a9030304e6111848d1fca6154e6311b1da8cf2275b6346d"} Jan 26 10:54:58 crc kubenswrapper[5003]: I0126 10:54:58.272535 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-75hzw" Jan 26 10:54:58 crc kubenswrapper[5003]: I0126 10:54:58.349005 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-75hzw" podStartSLOduration=1.60674417 podStartE2EDuration="10.348981552s" podCreationTimestamp="2026-01-26 10:54:48 +0000 UTC" firstStartedPulling="2026-01-26 10:54:49.264034997 +0000 UTC m=+704.805260568" lastFinishedPulling="2026-01-26 10:54:58.006272389 +0000 UTC m=+713.547497950" observedRunningTime="2026-01-26 10:54:58.346594584 +0000 UTC m=+713.887820165" watchObservedRunningTime="2026-01-26 10:54:58.348981552 +0000 UTC m=+713.890207133" Jan 26 10:54:59 crc kubenswrapper[5003]: I0126 10:54:59.280957 5003 generic.go:334] "Generic (PLEG): container finished" podID="8ffa174f-aed7-4f5b-9feb-62a45fc68d0b" containerID="5a19095e63f26c503c00623eb8381442149bc516ffcfc630c9c2ee8cdec6330e" exitCode=0 Jan 26 10:54:59 crc kubenswrapper[5003]: I0126 10:54:59.281058 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gplmk" event={"ID":"8ffa174f-aed7-4f5b-9feb-62a45fc68d0b","Type":"ContainerDied","Data":"5a19095e63f26c503c00623eb8381442149bc516ffcfc630c9c2ee8cdec6330e"} Jan 26 10:55:00 crc kubenswrapper[5003]: I0126 10:55:00.292667 5003 generic.go:334] "Generic (PLEG): container finished" podID="8ffa174f-aed7-4f5b-9feb-62a45fc68d0b" containerID="5b9e9dcc2957cd99d3950a3b95f552031fbeeec079d55c1d3c842042ace0c8ce" exitCode=0 Jan 
26 10:55:00 crc kubenswrapper[5003]: I0126 10:55:00.292738 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gplmk" event={"ID":"8ffa174f-aed7-4f5b-9feb-62a45fc68d0b","Type":"ContainerDied","Data":"5b9e9dcc2957cd99d3950a3b95f552031fbeeec079d55c1d3c842042ace0c8ce"} Jan 26 10:55:01 crc kubenswrapper[5003]: I0126 10:55:01.307239 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gplmk" event={"ID":"8ffa174f-aed7-4f5b-9feb-62a45fc68d0b","Type":"ContainerStarted","Data":"cfaae78731796bcc6baa6eda2a0623be08d17b381c78c61b3ffc73d2512e6cb8"} Jan 26 10:55:01 crc kubenswrapper[5003]: I0126 10:55:01.307292 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gplmk" event={"ID":"8ffa174f-aed7-4f5b-9feb-62a45fc68d0b","Type":"ContainerStarted","Data":"66838dfa6d1ff8284db95f7deeb6ac0c7e30861d49bf15056f82f0174e94b6e0"} Jan 26 10:55:01 crc kubenswrapper[5003]: I0126 10:55:01.307305 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gplmk" event={"ID":"8ffa174f-aed7-4f5b-9feb-62a45fc68d0b","Type":"ContainerStarted","Data":"703598caa30fb3b6c6dd701bbcefe4f1707efb6befdaa1438fcca22de1abc1fd"} Jan 26 10:55:01 crc kubenswrapper[5003]: I0126 10:55:01.307313 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gplmk" event={"ID":"8ffa174f-aed7-4f5b-9feb-62a45fc68d0b","Type":"ContainerStarted","Data":"be873ffae9ea03e55e2aa0118cbf5bc643b594020ba121bf52586deec06c5a72"} Jan 26 10:55:02 crc kubenswrapper[5003]: I0126 10:55:02.321528 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gplmk" event={"ID":"8ffa174f-aed7-4f5b-9feb-62a45fc68d0b","Type":"ContainerStarted","Data":"29f2059366dcf554dcc83f8e7a134b76761893d79214d21076f922c6b7ba1b60"} Jan 26 10:55:02 crc kubenswrapper[5003]: I0126 10:55:02.321882 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-gplmk" Jan 26 10:55:02 crc kubenswrapper[5003]: I0126 10:55:02.321904 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gplmk" event={"ID":"8ffa174f-aed7-4f5b-9feb-62a45fc68d0b","Type":"ContainerStarted","Data":"24da696a674b9a4014b46eb76088e41392dda842eb1f10025a598a656f83ddb8"} Jan 26 10:55:02 crc kubenswrapper[5003]: I0126 10:55:02.366693 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-gplmk" podStartSLOduration=6.94515016 podStartE2EDuration="14.366667099s" podCreationTimestamp="2026-01-26 10:54:48 +0000 UTC" firstStartedPulling="2026-01-26 10:54:50.612899061 +0000 UTC m=+706.154124662" lastFinishedPulling="2026-01-26 10:54:58.03441604 +0000 UTC m=+713.575641601" observedRunningTime="2026-01-26 10:55:02.359807574 +0000 UTC m=+717.901033175" watchObservedRunningTime="2026-01-26 10:55:02.366667099 +0000 UTC m=+717.907892700" Jan 26 10:55:02 crc kubenswrapper[5003]: I0126 10:55:02.473475 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-bfmvk" Jan 26 10:55:04 crc kubenswrapper[5003]: I0126 10:55:04.376218 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-gplmk" Jan 26 10:55:04 crc kubenswrapper[5003]: I0126 10:55:04.416388 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-gplmk" Jan 26 10:55:08 crc kubenswrapper[5003]: I0126 10:55:08.792882 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-75hzw" Jan 26 10:55:08 crc kubenswrapper[5003]: I0126 10:55:08.888063 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-index-dgcmz"] Jan 26 10:55:08 crc kubenswrapper[5003]: I0126 10:55:08.888759 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-dgcmz" Jan 26 10:55:08 crc kubenswrapper[5003]: I0126 10:55:08.892475 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Jan 26 10:55:08 crc kubenswrapper[5003]: I0126 10:55:08.905606 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Jan 26 10:55:08 crc kubenswrapper[5003]: I0126 10:55:08.905652 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-index-dockercfg-s4szd" Jan 26 10:55:08 crc kubenswrapper[5003]: I0126 10:55:08.909982 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-dgcmz"] Jan 26 10:55:09 crc kubenswrapper[5003]: I0126 10:55:09.040172 5003 patch_prober.go:28] interesting pod/machine-config-daemon-m84kp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 10:55:09 crc kubenswrapper[5003]: I0126 10:55:09.040221 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 10:55:09 crc kubenswrapper[5003]: I0126 10:55:09.054641 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fgnm\" (UniqueName: \"kubernetes.io/projected/b1784b25-6f15-49fc-af3b-089ccac7bd40-kube-api-access-5fgnm\") pod \"mariadb-operator-index-dgcmz\" (UID: \"b1784b25-6f15-49fc-af3b-089ccac7bd40\") " pod="openstack-operators/mariadb-operator-index-dgcmz" Jan 26 10:55:09 crc kubenswrapper[5003]: I0126 10:55:09.156600 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fgnm\" (UniqueName: \"kubernetes.io/projected/b1784b25-6f15-49fc-af3b-089ccac7bd40-kube-api-access-5fgnm\") pod \"mariadb-operator-index-dgcmz\" (UID: \"b1784b25-6f15-49fc-af3b-089ccac7bd40\") " pod="openstack-operators/mariadb-operator-index-dgcmz" Jan 26 10:55:09 crc kubenswrapper[5003]: I0126 10:55:09.183596 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fgnm\" (UniqueName: \"kubernetes.io/projected/b1784b25-6f15-49fc-af3b-089ccac7bd40-kube-api-access-5fgnm\") pod \"mariadb-operator-index-dgcmz\" (UID: \"b1784b25-6f15-49fc-af3b-089ccac7bd40\") " pod="openstack-operators/mariadb-operator-index-dgcmz" Jan 26 10:55:09 crc kubenswrapper[5003]: I0126 10:55:09.213162 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-dgcmz" Jan 26 10:55:09 crc kubenswrapper[5003]: I0126 10:55:09.424763 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-dgcmz"] Jan 26 10:55:09 crc kubenswrapper[5003]: W0126 10:55:09.427450 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb1784b25_6f15_49fc_af3b_089ccac7bd40.slice/crio-720f9e890de37ee9abcf6ed1eeff990cb5fdb6b0f2b7c6c5f56fc7eb4ace424e WatchSource:0}: Error finding container 720f9e890de37ee9abcf6ed1eeff990cb5fdb6b0f2b7c6c5f56fc7eb4ace424e: Status 404 returned error can't find the container with id 720f9e890de37ee9abcf6ed1eeff990cb5fdb6b0f2b7c6c5f56fc7eb4ace424e Jan 26 10:55:09 crc kubenswrapper[5003]: I0126 10:55:09.498570 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-6968d8fdc4-m7jxn" Jan 26 10:55:10 crc kubenswrapper[5003]: I0126 10:55:10.373378 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-dgcmz" event={"ID":"b1784b25-6f15-49fc-af3b-089ccac7bd40","Type":"ContainerStarted","Data":"720f9e890de37ee9abcf6ed1eeff990cb5fdb6b0f2b7c6c5f56fc7eb4ace424e"} Jan 26 10:55:12 crc kubenswrapper[5003]: I0126 10:55:12.271841 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-dgcmz"] Jan 26 10:55:12 crc kubenswrapper[5003]: I0126 10:55:12.388841 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-dgcmz" event={"ID":"b1784b25-6f15-49fc-af3b-089ccac7bd40","Type":"ContainerStarted","Data":"e7c4418a56c092495cd8be90c00544f532b66c898868a14dd5f6018d594116eb"} Jan 26 10:55:12 crc kubenswrapper[5003]: I0126 10:55:12.409715 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-index-dgcmz" podStartSLOduration=2.438334119 podStartE2EDuration="4.409691652s" podCreationTimestamp="2026-01-26 10:55:08 +0000 UTC" firstStartedPulling="2026-01-26 10:55:09.431600078 +0000 UTC m=+724.972825639" lastFinishedPulling="2026-01-26 10:55:11.402957611 +0000 UTC m=+726.944183172" observedRunningTime="2026-01-26 10:55:12.408820317 +0000 UTC m=+727.950045878" watchObservedRunningTime="2026-01-26 10:55:12.409691652 +0000 UTC m=+727.950917223" Jan 26 10:55:12 crc kubenswrapper[5003]: I0126 10:55:12.870542 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-index-7hhgf"] Jan 26 10:55:12 crc kubenswrapper[5003]: I0126 10:55:12.871555 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-7hhgf" Jan 26 10:55:12 crc kubenswrapper[5003]: I0126 10:55:12.881455 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-7hhgf"] Jan 26 10:55:13 crc kubenswrapper[5003]: I0126 10:55:13.002217 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chz4n\" (UniqueName: \"kubernetes.io/projected/6c588250-7629-448a-9007-f31db35eab93-kube-api-access-chz4n\") pod \"mariadb-operator-index-7hhgf\" (UID: \"6c588250-7629-448a-9007-f31db35eab93\") " pod="openstack-operators/mariadb-operator-index-7hhgf" Jan 26 10:55:13 crc kubenswrapper[5003]: I0126 10:55:13.103828 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chz4n\" (UniqueName: \"kubernetes.io/projected/6c588250-7629-448a-9007-f31db35eab93-kube-api-access-chz4n\") pod \"mariadb-operator-index-7hhgf\" (UID: \"6c588250-7629-448a-9007-f31db35eab93\") " pod="openstack-operators/mariadb-operator-index-7hhgf" Jan 26 10:55:13 crc kubenswrapper[5003]: I0126 10:55:13.131115 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chz4n\" (UniqueName: \"kubernetes.io/projected/6c588250-7629-448a-9007-f31db35eab93-kube-api-access-chz4n\") pod \"mariadb-operator-index-7hhgf\" (UID: \"6c588250-7629-448a-9007-f31db35eab93\") " pod="openstack-operators/mariadb-operator-index-7hhgf" Jan 26 10:55:13 crc kubenswrapper[5003]: I0126 10:55:13.189115 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-7hhgf" Jan 26 10:55:13 crc kubenswrapper[5003]: I0126 10:55:13.395336 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/mariadb-operator-index-dgcmz" podUID="b1784b25-6f15-49fc-af3b-089ccac7bd40" containerName="registry-server" containerID="cri-o://e7c4418a56c092495cd8be90c00544f532b66c898868a14dd5f6018d594116eb" gracePeriod=2 Jan 26 10:55:13 crc kubenswrapper[5003]: I0126 10:55:13.605836 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-7hhgf"] Jan 26 10:55:13 crc kubenswrapper[5003]: I0126 10:55:13.693676 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-dgcmz" Jan 26 10:55:13 crc kubenswrapper[5003]: I0126 10:55:13.818403 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5fgnm\" (UniqueName: \"kubernetes.io/projected/b1784b25-6f15-49fc-af3b-089ccac7bd40-kube-api-access-5fgnm\") pod \"b1784b25-6f15-49fc-af3b-089ccac7bd40\" (UID: \"b1784b25-6f15-49fc-af3b-089ccac7bd40\") " Jan 26 10:55:13 crc kubenswrapper[5003]: I0126 10:55:13.826946 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1784b25-6f15-49fc-af3b-089ccac7bd40-kube-api-access-5fgnm" (OuterVolumeSpecName: "kube-api-access-5fgnm") pod "b1784b25-6f15-49fc-af3b-089ccac7bd40" (UID: "b1784b25-6f15-49fc-af3b-089ccac7bd40"). InnerVolumeSpecName "kube-api-access-5fgnm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:55:13 crc kubenswrapper[5003]: I0126 10:55:13.920579 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5fgnm\" (UniqueName: \"kubernetes.io/projected/b1784b25-6f15-49fc-af3b-089ccac7bd40-kube-api-access-5fgnm\") on node \"crc\" DevicePath \"\"" Jan 26 10:55:14 crc kubenswrapper[5003]: I0126 10:55:14.404069 5003 generic.go:334] "Generic (PLEG): container finished" podID="b1784b25-6f15-49fc-af3b-089ccac7bd40" containerID="e7c4418a56c092495cd8be90c00544f532b66c898868a14dd5f6018d594116eb" exitCode=0 Jan 26 10:55:14 crc kubenswrapper[5003]: I0126 10:55:14.404119 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-dgcmz" event={"ID":"b1784b25-6f15-49fc-af3b-089ccac7bd40","Type":"ContainerDied","Data":"e7c4418a56c092495cd8be90c00544f532b66c898868a14dd5f6018d594116eb"} Jan 26 10:55:14 crc kubenswrapper[5003]: I0126 10:55:14.404144 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-dgcmz" Jan 26 10:55:14 crc kubenswrapper[5003]: I0126 10:55:14.404800 5003 scope.go:117] "RemoveContainer" containerID="e7c4418a56c092495cd8be90c00544f532b66c898868a14dd5f6018d594116eb" Jan 26 10:55:14 crc kubenswrapper[5003]: I0126 10:55:14.404641 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-dgcmz" event={"ID":"b1784b25-6f15-49fc-af3b-089ccac7bd40","Type":"ContainerDied","Data":"720f9e890de37ee9abcf6ed1eeff990cb5fdb6b0f2b7c6c5f56fc7eb4ace424e"} Jan 26 10:55:14 crc kubenswrapper[5003]: I0126 10:55:14.406276 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-7hhgf" event={"ID":"6c588250-7629-448a-9007-f31db35eab93","Type":"ContainerStarted","Data":"aa65edb996335d2a6322dd7db9c56266d1d7085cf85b87f8cd36e3f339236c90"} Jan 26 10:55:14 crc kubenswrapper[5003]: I0126 10:55:14.406301 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-7hhgf" event={"ID":"6c588250-7629-448a-9007-f31db35eab93","Type":"ContainerStarted","Data":"8927abc2579628f6a3ba9d80c9da069b57f323c97bb96722ff929492cd78d177"} Jan 26 10:55:14 crc kubenswrapper[5003]: I0126 10:55:14.429898 5003 scope.go:117] "RemoveContainer" containerID="e7c4418a56c092495cd8be90c00544f532b66c898868a14dd5f6018d594116eb" Jan 26 10:55:14 crc kubenswrapper[5003]: E0126 10:55:14.433497 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7c4418a56c092495cd8be90c00544f532b66c898868a14dd5f6018d594116eb\": container with ID starting with e7c4418a56c092495cd8be90c00544f532b66c898868a14dd5f6018d594116eb not found: ID does not exist" containerID="e7c4418a56c092495cd8be90c00544f532b66c898868a14dd5f6018d594116eb" Jan 26 10:55:14 crc kubenswrapper[5003]: I0126 10:55:14.433579 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7c4418a56c092495cd8be90c00544f532b66c898868a14dd5f6018d594116eb"} err="failed to get container status \"e7c4418a56c092495cd8be90c00544f532b66c898868a14dd5f6018d594116eb\": rpc error: code = NotFound desc = could not find container \"e7c4418a56c092495cd8be90c00544f532b66c898868a14dd5f6018d594116eb\": container with ID starting with e7c4418a56c092495cd8be90c00544f532b66c898868a14dd5f6018d594116eb not found: ID does not exist" Jan 26 10:55:14 crc kubenswrapper[5003]: I0126 
10:55:14.439732 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-index-7hhgf" podStartSLOduration=2.022451519 podStartE2EDuration="2.439714834s" podCreationTimestamp="2026-01-26 10:55:12 +0000 UTC" firstStartedPulling="2026-01-26 10:55:13.63317134 +0000 UTC m=+729.174396901" lastFinishedPulling="2026-01-26 10:55:14.050434655 +0000 UTC m=+729.591660216" observedRunningTime="2026-01-26 10:55:14.435407361 +0000 UTC m=+729.976632922" watchObservedRunningTime="2026-01-26 10:55:14.439714834 +0000 UTC m=+729.980940395" Jan 26 10:55:14 crc kubenswrapper[5003]: I0126 10:55:14.459291 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-dgcmz"] Jan 26 10:55:14 crc kubenswrapper[5003]: I0126 10:55:14.465710 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/mariadb-operator-index-dgcmz"] Jan 26 10:55:15 crc kubenswrapper[5003]: I0126 10:55:15.013234 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1784b25-6f15-49fc-af3b-089ccac7bd40" path="/var/lib/kubelet/pods/b1784b25-6f15-49fc-af3b-089ccac7bd40/volumes" Jan 26 10:55:19 crc kubenswrapper[5003]: I0126 10:55:19.380700 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-gplmk" Jan 26 10:55:23 crc kubenswrapper[5003]: I0126 10:55:23.193472 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/mariadb-operator-index-7hhgf" Jan 26 10:55:23 crc kubenswrapper[5003]: I0126 10:55:23.194032 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-index-7hhgf" Jan 26 10:55:23 crc kubenswrapper[5003]: I0126 10:55:23.234898 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/mariadb-operator-index-7hhgf" Jan 26 10:55:23 crc kubenswrapper[5003]: I0126 10:55:23.491947 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-index-7hhgf" Jan 26 10:55:29 crc kubenswrapper[5003]: I0126 10:55:29.889721 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/2e919283e07611ac611f6219872c365c8d6dbd6aec009cc72be1adebbfrqq9l"] Jan 26 10:55:29 crc kubenswrapper[5003]: E0126 10:55:29.890590 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1784b25-6f15-49fc-af3b-089ccac7bd40" containerName="registry-server" Jan 26 10:55:29 crc kubenswrapper[5003]: I0126 10:55:29.890612 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1784b25-6f15-49fc-af3b-089ccac7bd40" containerName="registry-server" Jan 26 10:55:29 crc kubenswrapper[5003]: I0126 10:55:29.890802 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1784b25-6f15-49fc-af3b-089ccac7bd40" containerName="registry-server" Jan 26 10:55:29 crc kubenswrapper[5003]: I0126 10:55:29.891984 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/2e919283e07611ac611f6219872c365c8d6dbd6aec009cc72be1adebbfrqq9l" Jan 26 10:55:29 crc kubenswrapper[5003]: I0126 10:55:29.894259 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-x8f6q" Jan 26 10:55:29 crc kubenswrapper[5003]: I0126 10:55:29.906450 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/2e919283e07611ac611f6219872c365c8d6dbd6aec009cc72be1adebbfrqq9l"] Jan 26 10:55:29 crc kubenswrapper[5003]: I0126 10:55:29.964957 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/35801d0d-b495-49ae-85b2-3f3bccd0241c-bundle\") pod \"2e919283e07611ac611f6219872c365c8d6dbd6aec009cc72be1adebbfrqq9l\" (UID: \"35801d0d-b495-49ae-85b2-3f3bccd0241c\") " pod="openstack-operators/2e919283e07611ac611f6219872c365c8d6dbd6aec009cc72be1adebbfrqq9l" Jan 26 10:55:29 crc kubenswrapper[5003]: I0126 10:55:29.965334 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/35801d0d-b495-49ae-85b2-3f3bccd0241c-util\") pod \"2e919283e07611ac611f6219872c365c8d6dbd6aec009cc72be1adebbfrqq9l\" (UID: \"35801d0d-b495-49ae-85b2-3f3bccd0241c\") " pod="openstack-operators/2e919283e07611ac611f6219872c365c8d6dbd6aec009cc72be1adebbfrqq9l" Jan 26 10:55:29 crc kubenswrapper[5003]: I0126 10:55:29.965486 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftk5q\" (UniqueName: \"kubernetes.io/projected/35801d0d-b495-49ae-85b2-3f3bccd0241c-kube-api-access-ftk5q\") pod \"2e919283e07611ac611f6219872c365c8d6dbd6aec009cc72be1adebbfrqq9l\" (UID: \"35801d0d-b495-49ae-85b2-3f3bccd0241c\") " pod="openstack-operators/2e919283e07611ac611f6219872c365c8d6dbd6aec009cc72be1adebbfrqq9l" Jan 26 10:55:30 crc kubenswrapper[5003]: I0126 10:55:30.066420 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftk5q\" (UniqueName: \"kubernetes.io/projected/35801d0d-b495-49ae-85b2-3f3bccd0241c-kube-api-access-ftk5q\") pod \"2e919283e07611ac611f6219872c365c8d6dbd6aec009cc72be1adebbfrqq9l\" (UID: \"35801d0d-b495-49ae-85b2-3f3bccd0241c\") " pod="openstack-operators/2e919283e07611ac611f6219872c365c8d6dbd6aec009cc72be1adebbfrqq9l" Jan 26 10:55:30 crc kubenswrapper[5003]: I0126 10:55:30.066501 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/35801d0d-b495-49ae-85b2-3f3bccd0241c-bundle\") pod \"2e919283e07611ac611f6219872c365c8d6dbd6aec009cc72be1adebbfrqq9l\" (UID: \"35801d0d-b495-49ae-85b2-3f3bccd0241c\") " pod="openstack-operators/2e919283e07611ac611f6219872c365c8d6dbd6aec009cc72be1adebbfrqq9l" Jan 26 10:55:30 crc kubenswrapper[5003]: I0126 10:55:30.066571 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/35801d0d-b495-49ae-85b2-3f3bccd0241c-util\") pod \"2e919283e07611ac611f6219872c365c8d6dbd6aec009cc72be1adebbfrqq9l\" (UID: \"35801d0d-b495-49ae-85b2-3f3bccd0241c\") " pod="openstack-operators/2e919283e07611ac611f6219872c365c8d6dbd6aec009cc72be1adebbfrqq9l" Jan 26 10:55:30 crc kubenswrapper[5003]: I0126 10:55:30.067153 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/35801d0d-b495-49ae-85b2-3f3bccd0241c-util\") pod \"2e919283e07611ac611f6219872c365c8d6dbd6aec009cc72be1adebbfrqq9l\" (UID: \"35801d0d-b495-49ae-85b2-3f3bccd0241c\") " pod="openstack-operators/2e919283e07611ac611f6219872c365c8d6dbd6aec009cc72be1adebbfrqq9l" Jan 26 10:55:30 crc kubenswrapper[5003]: I0126 10:55:30.067436 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/35801d0d-b495-49ae-85b2-3f3bccd0241c-bundle\") pod \"2e919283e07611ac611f6219872c365c8d6dbd6aec009cc72be1adebbfrqq9l\" (UID: \"35801d0d-b495-49ae-85b2-3f3bccd0241c\") " pod="openstack-operators/2e919283e07611ac611f6219872c365c8d6dbd6aec009cc72be1adebbfrqq9l" Jan 26 10:55:30 crc kubenswrapper[5003]: I0126 10:55:30.088546 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ftk5q\" (UniqueName: \"kubernetes.io/projected/35801d0d-b495-49ae-85b2-3f3bccd0241c-kube-api-access-ftk5q\") pod \"2e919283e07611ac611f6219872c365c8d6dbd6aec009cc72be1adebbfrqq9l\" (UID: \"35801d0d-b495-49ae-85b2-3f3bccd0241c\") " pod="openstack-operators/2e919283e07611ac611f6219872c365c8d6dbd6aec009cc72be1adebbfrqq9l" Jan 26 10:55:30 crc kubenswrapper[5003]: I0126 10:55:30.216326 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/2e919283e07611ac611f6219872c365c8d6dbd6aec009cc72be1adebbfrqq9l" Jan 26 10:55:30 crc kubenswrapper[5003]: I0126 10:55:30.444005 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/2e919283e07611ac611f6219872c365c8d6dbd6aec009cc72be1adebbfrqq9l"] Jan 26 10:55:30 crc kubenswrapper[5003]: W0126 10:55:30.452377 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod35801d0d_b495_49ae_85b2_3f3bccd0241c.slice/crio-f18a862484d84f9d63c82dc2bb39abbb10d80ef1a1cc58ae57cf15107805ada2 WatchSource:0}: Error finding container f18a862484d84f9d63c82dc2bb39abbb10d80ef1a1cc58ae57cf15107805ada2: Status 404 returned error can't find the container with id f18a862484d84f9d63c82dc2bb39abbb10d80ef1a1cc58ae57cf15107805ada2 Jan 26 10:55:30 crc kubenswrapper[5003]: I0126 10:55:30.515242 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2e919283e07611ac611f6219872c365c8d6dbd6aec009cc72be1adebbfrqq9l" event={"ID":"35801d0d-b495-49ae-85b2-3f3bccd0241c","Type":"ContainerStarted","Data":"f18a862484d84f9d63c82dc2bb39abbb10d80ef1a1cc58ae57cf15107805ada2"} Jan 26 10:55:31 crc kubenswrapper[5003]: I0126 10:55:31.524610 5003 generic.go:334] "Generic (PLEG): container finished" podID="35801d0d-b495-49ae-85b2-3f3bccd0241c" containerID="e04173a0052a85c4ef9a039ded556917dc49db18e7a7e5e7f1564993575fbb69" exitCode=0 Jan 26 10:55:31 crc kubenswrapper[5003]: I0126 10:55:31.524710 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2e919283e07611ac611f6219872c365c8d6dbd6aec009cc72be1adebbfrqq9l" event={"ID":"35801d0d-b495-49ae-85b2-3f3bccd0241c","Type":"ContainerDied","Data":"e04173a0052a85c4ef9a039ded556917dc49db18e7a7e5e7f1564993575fbb69"} Jan 26 10:55:32 crc kubenswrapper[5003]: I0126 10:55:32.532908 5003 generic.go:334] "Generic (PLEG): container finished" podID="35801d0d-b495-49ae-85b2-3f3bccd0241c" containerID="3f8e0fc04b3b211b98dd54ea444fd48d86d4fc6eae8d898598472e94ef2bb5ac" exitCode=0 Jan 26 10:55:32 crc kubenswrapper[5003]: I0126 10:55:32.532954 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/2e919283e07611ac611f6219872c365c8d6dbd6aec009cc72be1adebbfrqq9l" event={"ID":"35801d0d-b495-49ae-85b2-3f3bccd0241c","Type":"ContainerDied","Data":"3f8e0fc04b3b211b98dd54ea444fd48d86d4fc6eae8d898598472e94ef2bb5ac"} Jan 26 10:55:33 crc kubenswrapper[5003]: I0126 10:55:33.542859 5003 generic.go:334] "Generic (PLEG): container finished" podID="35801d0d-b495-49ae-85b2-3f3bccd0241c" containerID="94566121d3406f44f1ac5513fe57f3b41239f4f87b36ac20435b288ddc785e80" exitCode=0 Jan 26 10:55:33 crc kubenswrapper[5003]: I0126 10:55:33.542908 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2e919283e07611ac611f6219872c365c8d6dbd6aec009cc72be1adebbfrqq9l" event={"ID":"35801d0d-b495-49ae-85b2-3f3bccd0241c","Type":"ContainerDied","Data":"94566121d3406f44f1ac5513fe57f3b41239f4f87b36ac20435b288ddc785e80"} Jan 26 10:55:34 crc kubenswrapper[5003]: I0126 10:55:34.805652 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/2e919283e07611ac611f6219872c365c8d6dbd6aec009cc72be1adebbfrqq9l" Jan 26 10:55:34 crc kubenswrapper[5003]: I0126 10:55:34.928071 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ftk5q\" (UniqueName: \"kubernetes.io/projected/35801d0d-b495-49ae-85b2-3f3bccd0241c-kube-api-access-ftk5q\") pod \"35801d0d-b495-49ae-85b2-3f3bccd0241c\" (UID: \"35801d0d-b495-49ae-85b2-3f3bccd0241c\") " Jan 26 10:55:34 crc kubenswrapper[5003]: I0126 10:55:34.928174 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/35801d0d-b495-49ae-85b2-3f3bccd0241c-util\") pod \"35801d0d-b495-49ae-85b2-3f3bccd0241c\" (UID: \"35801d0d-b495-49ae-85b2-3f3bccd0241c\") " Jan 26 10:55:34 crc kubenswrapper[5003]: I0126 10:55:34.928270 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/35801d0d-b495-49ae-85b2-3f3bccd0241c-bundle\") pod \"35801d0d-b495-49ae-85b2-3f3bccd0241c\" (UID: \"35801d0d-b495-49ae-85b2-3f3bccd0241c\") " Jan 26 10:55:34 crc kubenswrapper[5003]: I0126 10:55:34.929461 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35801d0d-b495-49ae-85b2-3f3bccd0241c-bundle" (OuterVolumeSpecName: "bundle") pod "35801d0d-b495-49ae-85b2-3f3bccd0241c" (UID: "35801d0d-b495-49ae-85b2-3f3bccd0241c"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:55:34 crc kubenswrapper[5003]: I0126 10:55:34.935654 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35801d0d-b495-49ae-85b2-3f3bccd0241c-kube-api-access-ftk5q" (OuterVolumeSpecName: "kube-api-access-ftk5q") pod "35801d0d-b495-49ae-85b2-3f3bccd0241c" (UID: "35801d0d-b495-49ae-85b2-3f3bccd0241c"). InnerVolumeSpecName "kube-api-access-ftk5q". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:55:34 crc kubenswrapper[5003]: I0126 10:55:34.948591 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35801d0d-b495-49ae-85b2-3f3bccd0241c-util" (OuterVolumeSpecName: "util") pod "35801d0d-b495-49ae-85b2-3f3bccd0241c" (UID: "35801d0d-b495-49ae-85b2-3f3bccd0241c"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:55:35 crc kubenswrapper[5003]: I0126 10:55:35.029211 5003 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/35801d0d-b495-49ae-85b2-3f3bccd0241c-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 10:55:35 crc kubenswrapper[5003]: I0126 10:55:35.029241 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ftk5q\" (UniqueName: \"kubernetes.io/projected/35801d0d-b495-49ae-85b2-3f3bccd0241c-kube-api-access-ftk5q\") on node \"crc\" DevicePath \"\"" Jan 26 10:55:35 crc kubenswrapper[5003]: I0126 10:55:35.029249 5003 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/35801d0d-b495-49ae-85b2-3f3bccd0241c-util\") on node \"crc\" DevicePath \"\"" Jan 26 10:55:35 crc kubenswrapper[5003]: I0126 10:55:35.562451 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2e919283e07611ac611f6219872c365c8d6dbd6aec009cc72be1adebbfrqq9l" event={"ID":"35801d0d-b495-49ae-85b2-3f3bccd0241c","Type":"ContainerDied","Data":"f18a862484d84f9d63c82dc2bb39abbb10d80ef1a1cc58ae57cf15107805ada2"} Jan 26 10:55:35 crc kubenswrapper[5003]: I0126 10:55:35.562524 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f18a862484d84f9d63c82dc2bb39abbb10d80ef1a1cc58ae57cf15107805ada2" Jan 26 10:55:35 crc kubenswrapper[5003]: I0126 10:55:35.562572 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/2e919283e07611ac611f6219872c365c8d6dbd6aec009cc72be1adebbfrqq9l" Jan 26 10:55:39 crc kubenswrapper[5003]: I0126 10:55:39.039575 5003 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 26 10:55:39 crc kubenswrapper[5003]: I0126 10:55:39.040488 5003 patch_prober.go:28] interesting pod/machine-config-daemon-m84kp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 10:55:39 crc kubenswrapper[5003]: I0126 10:55:39.040573 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 10:55:43 crc kubenswrapper[5003]: I0126 10:55:43.018871 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-78fc8bbd6f-lbg4b"] Jan 26 10:55:43 crc kubenswrapper[5003]: E0126 10:55:43.019648 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35801d0d-b495-49ae-85b2-3f3bccd0241c" containerName="extract" Jan 26 10:55:43 crc kubenswrapper[5003]: I0126 10:55:43.019664 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="35801d0d-b495-49ae-85b2-3f3bccd0241c" containerName="extract" Jan 26 10:55:43 crc kubenswrapper[5003]: E0126 10:55:43.019679 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35801d0d-b495-49ae-85b2-3f3bccd0241c" containerName="util" Jan 26 10:55:43 crc kubenswrapper[5003]: I0126 10:55:43.019686 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="35801d0d-b495-49ae-85b2-3f3bccd0241c" containerName="util" Jan 26 10:55:43 
crc kubenswrapper[5003]: E0126 10:55:43.019701 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35801d0d-b495-49ae-85b2-3f3bccd0241c" containerName="pull" Jan 26 10:55:43 crc kubenswrapper[5003]: I0126 10:55:43.019709 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="35801d0d-b495-49ae-85b2-3f3bccd0241c" containerName="pull" Jan 26 10:55:43 crc kubenswrapper[5003]: I0126 10:55:43.019831 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="35801d0d-b495-49ae-85b2-3f3bccd0241c" containerName="extract" Jan 26 10:55:43 crc kubenswrapper[5003]: I0126 10:55:43.020268 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-78fc8bbd6f-lbg4b" Jan 26 10:55:43 crc kubenswrapper[5003]: I0126 10:55:43.022134 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-service-cert" Jan 26 10:55:43 crc kubenswrapper[5003]: I0126 10:55:43.024910 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Jan 26 10:55:43 crc kubenswrapper[5003]: I0126 10:55:43.025952 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-fvxc4" Jan 26 10:55:43 crc kubenswrapper[5003]: I0126 10:55:43.031087 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-78fc8bbd6f-lbg4b"] Jan 26 10:55:43 crc kubenswrapper[5003]: I0126 10:55:43.031540 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k27tm\" (UniqueName: \"kubernetes.io/projected/65018825-022b-422d-9c2a-3f22f2619d1a-kube-api-access-k27tm\") pod \"mariadb-operator-controller-manager-78fc8bbd6f-lbg4b\" (UID: \"65018825-022b-422d-9c2a-3f22f2619d1a\") " pod="openstack-operators/mariadb-operator-controller-manager-78fc8bbd6f-lbg4b" Jan 26 10:55:43 crc kubenswrapper[5003]: I0126 10:55:43.031880 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/65018825-022b-422d-9c2a-3f22f2619d1a-webhook-cert\") pod \"mariadb-operator-controller-manager-78fc8bbd6f-lbg4b\" (UID: \"65018825-022b-422d-9c2a-3f22f2619d1a\") " pod="openstack-operators/mariadb-operator-controller-manager-78fc8bbd6f-lbg4b" Jan 26 10:55:43 crc kubenswrapper[5003]: I0126 10:55:43.031972 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/65018825-022b-422d-9c2a-3f22f2619d1a-apiservice-cert\") pod \"mariadb-operator-controller-manager-78fc8bbd6f-lbg4b\" (UID: \"65018825-022b-422d-9c2a-3f22f2619d1a\") " pod="openstack-operators/mariadb-operator-controller-manager-78fc8bbd6f-lbg4b" Jan 26 10:55:43 crc kubenswrapper[5003]: I0126 10:55:43.133062 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k27tm\" (UniqueName: \"kubernetes.io/projected/65018825-022b-422d-9c2a-3f22f2619d1a-kube-api-access-k27tm\") pod \"mariadb-operator-controller-manager-78fc8bbd6f-lbg4b\" (UID: \"65018825-022b-422d-9c2a-3f22f2619d1a\") " pod="openstack-operators/mariadb-operator-controller-manager-78fc8bbd6f-lbg4b" Jan 26 10:55:43 crc kubenswrapper[5003]: I0126 10:55:43.133150 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/65018825-022b-422d-9c2a-3f22f2619d1a-webhook-cert\") pod \"mariadb-operator-controller-manager-78fc8bbd6f-lbg4b\" (UID: \"65018825-022b-422d-9c2a-3f22f2619d1a\") " pod="openstack-operators/mariadb-operator-controller-manager-78fc8bbd6f-lbg4b" Jan 26 10:55:43 crc kubenswrapper[5003]: I0126 10:55:43.133181 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/65018825-022b-422d-9c2a-3f22f2619d1a-apiservice-cert\") pod \"mariadb-operator-controller-manager-78fc8bbd6f-lbg4b\" (UID: \"65018825-022b-422d-9c2a-3f22f2619d1a\") " pod="openstack-operators/mariadb-operator-controller-manager-78fc8bbd6f-lbg4b" Jan 26 10:55:43 crc kubenswrapper[5003]: I0126 10:55:43.139205 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/65018825-022b-422d-9c2a-3f22f2619d1a-webhook-cert\") pod \"mariadb-operator-controller-manager-78fc8bbd6f-lbg4b\" (UID: \"65018825-022b-422d-9c2a-3f22f2619d1a\") " pod="openstack-operators/mariadb-operator-controller-manager-78fc8bbd6f-lbg4b" Jan 26 10:55:43 crc kubenswrapper[5003]: I0126 10:55:43.141316 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/65018825-022b-422d-9c2a-3f22f2619d1a-apiservice-cert\") pod \"mariadb-operator-controller-manager-78fc8bbd6f-lbg4b\" (UID: \"65018825-022b-422d-9c2a-3f22f2619d1a\") " pod="openstack-operators/mariadb-operator-controller-manager-78fc8bbd6f-lbg4b" Jan 26 10:55:43 crc kubenswrapper[5003]: I0126 10:55:43.156876 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k27tm\" (UniqueName: \"kubernetes.io/projected/65018825-022b-422d-9c2a-3f22f2619d1a-kube-api-access-k27tm\") pod \"mariadb-operator-controller-manager-78fc8bbd6f-lbg4b\" (UID: \"65018825-022b-422d-9c2a-3f22f2619d1a\") " pod="openstack-operators/mariadb-operator-controller-manager-78fc8bbd6f-lbg4b" Jan 26 10:55:43 crc kubenswrapper[5003]: I0126 10:55:43.344837 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-78fc8bbd6f-lbg4b" Jan 26 10:55:43 crc kubenswrapper[5003]: I0126 10:55:43.630121 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-78fc8bbd6f-lbg4b"] Jan 26 10:55:44 crc kubenswrapper[5003]: I0126 10:55:44.618272 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-78fc8bbd6f-lbg4b" event={"ID":"65018825-022b-422d-9c2a-3f22f2619d1a","Type":"ContainerStarted","Data":"de2d284a1b855d15a43972864ae136e367fd73d8d80f5403d948a50ae9910db2"} Jan 26 10:55:47 crc kubenswrapper[5003]: I0126 10:55:47.636949 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-78fc8bbd6f-lbg4b" event={"ID":"65018825-022b-422d-9c2a-3f22f2619d1a","Type":"ContainerStarted","Data":"c94056cc8ca6f4a988902a7e1fd8fbad3b798cdf6cf58f3fa68570819eebd0e4"} Jan 26 10:55:47 crc kubenswrapper[5003]: I0126 10:55:47.637307 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-78fc8bbd6f-lbg4b" Jan 26 10:55:53 crc kubenswrapper[5003]: I0126 10:55:53.353675 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-78fc8bbd6f-lbg4b" Jan 26 10:55:53 crc kubenswrapper[5003]: I0126 10:55:53.374902 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-78fc8bbd6f-lbg4b" podStartSLOduration=7.029718193 podStartE2EDuration="10.374884513s" podCreationTimestamp="2026-01-26 10:55:43 +0000 UTC" firstStartedPulling="2026-01-26 10:55:43.641220875 +0000 UTC m=+759.182446436" lastFinishedPulling="2026-01-26 10:55:46.986387195 +0000 UTC m=+762.527612756" observedRunningTime="2026-01-26 10:55:47.663144265 +0000 UTC m=+763.204369836" watchObservedRunningTime="2026-01-26 10:55:53.374884513 +0000 UTC m=+768.916110074" Jan 26 10:55:58 crc kubenswrapper[5003]: I0126 10:55:58.565920 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-index-4ktsf"] Jan 26 10:55:58 crc kubenswrapper[5003]: I0126 10:55:58.566999 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-4ktsf" Jan 26 10:55:58 crc kubenswrapper[5003]: I0126 10:55:58.570912 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-index-dockercfg-ffhsq" Jan 26 10:55:58 crc kubenswrapper[5003]: I0126 10:55:58.577719 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-4ktsf"] Jan 26 10:55:58 crc kubenswrapper[5003]: I0126 10:55:58.623519 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzfwl\" (UniqueName: \"kubernetes.io/projected/08ae1eee-e34a-417a-9b98-68a61d48dba9-kube-api-access-nzfwl\") pod \"infra-operator-index-4ktsf\" (UID: \"08ae1eee-e34a-417a-9b98-68a61d48dba9\") " pod="openstack-operators/infra-operator-index-4ktsf" Jan 26 10:55:58 crc kubenswrapper[5003]: I0126 10:55:58.724238 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzfwl\" (UniqueName: \"kubernetes.io/projected/08ae1eee-e34a-417a-9b98-68a61d48dba9-kube-api-access-nzfwl\") pod \"infra-operator-index-4ktsf\" (UID: \"08ae1eee-e34a-417a-9b98-68a61d48dba9\") " pod="openstack-operators/infra-operator-index-4ktsf" Jan 26 10:55:58 crc kubenswrapper[5003]: I0126 10:55:58.747081 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzfwl\" (UniqueName: \"kubernetes.io/projected/08ae1eee-e34a-417a-9b98-68a61d48dba9-kube-api-access-nzfwl\") pod \"infra-operator-index-4ktsf\" (UID: \"08ae1eee-e34a-417a-9b98-68a61d48dba9\") " pod="openstack-operators/infra-operator-index-4ktsf" Jan 26 10:55:58 crc kubenswrapper[5003]: I0126 10:55:58.882810 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-4ktsf" Jan 26 10:55:59 crc kubenswrapper[5003]: I0126 10:55:59.144340 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-4ktsf"] Jan 26 10:55:59 crc kubenswrapper[5003]: I0126 10:55:59.709744 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-4ktsf" event={"ID":"08ae1eee-e34a-417a-9b98-68a61d48dba9","Type":"ContainerStarted","Data":"0f8590de9a2a08b818d052e65f02e0403e1af32e4c5d009ab9cd322919ae4615"} Jan 26 10:56:00 crc kubenswrapper[5003]: I0126 10:56:00.719675 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-4ktsf" event={"ID":"08ae1eee-e34a-417a-9b98-68a61d48dba9","Type":"ContainerStarted","Data":"270e6060ab7b2d77cd910de7a39418b34751ba0dbec6189b910e42a040cdb274"} Jan 26 10:56:00 crc kubenswrapper[5003]: I0126 10:56:00.744260 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-index-4ktsf" podStartSLOduration=1.981172811 podStartE2EDuration="2.744232165s" podCreationTimestamp="2026-01-26 10:55:58 +0000 UTC" firstStartedPulling="2026-01-26 10:55:59.15331123 +0000 UTC m=+774.694536791" lastFinishedPulling="2026-01-26 10:55:59.916370584 +0000 UTC m=+775.457596145" observedRunningTime="2026-01-26 10:56:00.738619755 +0000 UTC m=+776.279845356" watchObservedRunningTime="2026-01-26 10:56:00.744232165 +0000 UTC m=+776.285457756" Jan 26 10:56:02 crc kubenswrapper[5003]: I0126 10:56:02.764111 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-4ktsf"] Jan 26 10:56:02 crc kubenswrapper[5003]: I0126 10:56:02.764667 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/infra-operator-index-4ktsf" podUID="08ae1eee-e34a-417a-9b98-68a61d48dba9" containerName="registry-server" containerID="cri-o://270e6060ab7b2d77cd910de7a39418b34751ba0dbec6189b910e42a040cdb274" gracePeriod=2 Jan 26 10:56:03 crc kubenswrapper[5003]: I0126 10:56:03.170532 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-4ktsf" Jan 26 10:56:03 crc kubenswrapper[5003]: I0126 10:56:03.282180 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzfwl\" (UniqueName: \"kubernetes.io/projected/08ae1eee-e34a-417a-9b98-68a61d48dba9-kube-api-access-nzfwl\") pod \"08ae1eee-e34a-417a-9b98-68a61d48dba9\" (UID: \"08ae1eee-e34a-417a-9b98-68a61d48dba9\") " Jan 26 10:56:03 crc kubenswrapper[5003]: I0126 10:56:03.286918 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08ae1eee-e34a-417a-9b98-68a61d48dba9-kube-api-access-nzfwl" (OuterVolumeSpecName: "kube-api-access-nzfwl") pod "08ae1eee-e34a-417a-9b98-68a61d48dba9" (UID: "08ae1eee-e34a-417a-9b98-68a61d48dba9"). InnerVolumeSpecName "kube-api-access-nzfwl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:56:03 crc kubenswrapper[5003]: I0126 10:56:03.370351 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-index-djwjt"] Jan 26 10:56:03 crc kubenswrapper[5003]: E0126 10:56:03.371422 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08ae1eee-e34a-417a-9b98-68a61d48dba9" containerName="registry-server" Jan 26 10:56:03 crc kubenswrapper[5003]: I0126 10:56:03.371439 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="08ae1eee-e34a-417a-9b98-68a61d48dba9" containerName="registry-server" Jan 26 10:56:03 crc kubenswrapper[5003]: I0126 10:56:03.371576 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="08ae1eee-e34a-417a-9b98-68a61d48dba9" containerName="registry-server" Jan 26 10:56:03 crc kubenswrapper[5003]: I0126 10:56:03.375990 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-djwjt" Jan 26 10:56:03 crc kubenswrapper[5003]: I0126 10:56:03.380844 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-djwjt"] Jan 26 10:56:03 crc kubenswrapper[5003]: I0126 10:56:03.383447 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6xsp\" (UniqueName: \"kubernetes.io/projected/9e455f13-32bb-4f60-9624-678d440683ac-kube-api-access-s6xsp\") pod \"infra-operator-index-djwjt\" (UID: \"9e455f13-32bb-4f60-9624-678d440683ac\") " pod="openstack-operators/infra-operator-index-djwjt" Jan 26 10:56:03 crc kubenswrapper[5003]: I0126 10:56:03.383613 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzfwl\" (UniqueName: \"kubernetes.io/projected/08ae1eee-e34a-417a-9b98-68a61d48dba9-kube-api-access-nzfwl\") on node \"crc\" DevicePath \"\"" Jan 26 10:56:03 crc kubenswrapper[5003]: I0126 10:56:03.484581 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6xsp\" (UniqueName: \"kubernetes.io/projected/9e455f13-32bb-4f60-9624-678d440683ac-kube-api-access-s6xsp\") pod \"infra-operator-index-djwjt\" (UID: \"9e455f13-32bb-4f60-9624-678d440683ac\") " pod="openstack-operators/infra-operator-index-djwjt" Jan 26 10:56:03 crc kubenswrapper[5003]: I0126 10:56:03.504900 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6xsp\" (UniqueName: \"kubernetes.io/projected/9e455f13-32bb-4f60-9624-678d440683ac-kube-api-access-s6xsp\") pod \"infra-operator-index-djwjt\" (UID: \"9e455f13-32bb-4f60-9624-678d440683ac\") " pod="openstack-operators/infra-operator-index-djwjt" Jan 26 10:56:03 crc kubenswrapper[5003]: I0126 10:56:03.709901 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-djwjt" Jan 26 10:56:03 crc kubenswrapper[5003]: I0126 10:56:03.737964 5003 generic.go:334] "Generic (PLEG): container finished" podID="08ae1eee-e34a-417a-9b98-68a61d48dba9" containerID="270e6060ab7b2d77cd910de7a39418b34751ba0dbec6189b910e42a040cdb274" exitCode=0 Jan 26 10:56:03 crc kubenswrapper[5003]: I0126 10:56:03.738025 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-4ktsf" event={"ID":"08ae1eee-e34a-417a-9b98-68a61d48dba9","Type":"ContainerDied","Data":"270e6060ab7b2d77cd910de7a39418b34751ba0dbec6189b910e42a040cdb274"} Jan 26 10:56:03 crc kubenswrapper[5003]: I0126 10:56:03.738072 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-4ktsf" event={"ID":"08ae1eee-e34a-417a-9b98-68a61d48dba9","Type":"ContainerDied","Data":"0f8590de9a2a08b818d052e65f02e0403e1af32e4c5d009ab9cd322919ae4615"} Jan 26 10:56:03 crc kubenswrapper[5003]: I0126 10:56:03.738104 5003 scope.go:117] "RemoveContainer" containerID="270e6060ab7b2d77cd910de7a39418b34751ba0dbec6189b910e42a040cdb274" Jan 26 10:56:03 crc kubenswrapper[5003]: I0126 10:56:03.738302 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-4ktsf" Jan 26 10:56:03 crc kubenswrapper[5003]: I0126 10:56:03.775254 5003 scope.go:117] "RemoveContainer" containerID="270e6060ab7b2d77cd910de7a39418b34751ba0dbec6189b910e42a040cdb274" Jan 26 10:56:03 crc kubenswrapper[5003]: E0126 10:56:03.776447 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"270e6060ab7b2d77cd910de7a39418b34751ba0dbec6189b910e42a040cdb274\": container with ID starting with 270e6060ab7b2d77cd910de7a39418b34751ba0dbec6189b910e42a040cdb274 not found: ID does not exist" containerID="270e6060ab7b2d77cd910de7a39418b34751ba0dbec6189b910e42a040cdb274" Jan 26 10:56:03 crc kubenswrapper[5003]: I0126 10:56:03.776491 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"270e6060ab7b2d77cd910de7a39418b34751ba0dbec6189b910e42a040cdb274"} err="failed to get container status \"270e6060ab7b2d77cd910de7a39418b34751ba0dbec6189b910e42a040cdb274\": rpc error: code = NotFound desc = could not find container \"270e6060ab7b2d77cd910de7a39418b34751ba0dbec6189b910e42a040cdb274\": container with ID starting with 270e6060ab7b2d77cd910de7a39418b34751ba0dbec6189b910e42a040cdb274 not found: ID does not exist" Jan 26 10:56:03 crc kubenswrapper[5003]: I0126 10:56:03.777471 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-4ktsf"] Jan 26 10:56:03 crc kubenswrapper[5003]: I0126 10:56:03.782515 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/infra-operator-index-4ktsf"] Jan 26 10:56:04 crc kubenswrapper[5003]: I0126 10:56:04.128526 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-djwjt"] Jan 26 10:56:04 crc kubenswrapper[5003]: I0126 10:56:04.753160 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-djwjt" event={"ID":"9e455f13-32bb-4f60-9624-678d440683ac","Type":"ContainerStarted","Data":"b90a03dc72c1189b5000be57ca6aa9edb43390bd4bfa5da19e5f3b1a113f3354"} Jan 26 10:56:05 crc kubenswrapper[5003]: I0126 10:56:05.013189 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes 
dir" podUID="08ae1eee-e34a-417a-9b98-68a61d48dba9" path="/var/lib/kubelet/pods/08ae1eee-e34a-417a-9b98-68a61d48dba9/volumes" Jan 26 10:56:05 crc kubenswrapper[5003]: I0126 10:56:05.761181 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-djwjt" event={"ID":"9e455f13-32bb-4f60-9624-678d440683ac","Type":"ContainerStarted","Data":"d56352fedf69c2a173a4060ec223da40400cf2cd5408103cf09eea5da166fc99"} Jan 26 10:56:09 crc kubenswrapper[5003]: I0126 10:56:09.040037 5003 patch_prober.go:28] interesting pod/machine-config-daemon-m84kp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 10:56:09 crc kubenswrapper[5003]: I0126 10:56:09.040434 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 10:56:09 crc kubenswrapper[5003]: I0126 10:56:09.040510 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" Jan 26 10:56:09 crc kubenswrapper[5003]: I0126 10:56:09.041506 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4dd55168d07d12b4dda1e126f43b86ddabeac34b8ea63b9c2a281cb6276edb9b"} pod="openshift-machine-config-operator/machine-config-daemon-m84kp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 26 10:56:09 crc kubenswrapper[5003]: I0126 10:56:09.041591 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" containerID="cri-o://4dd55168d07d12b4dda1e126f43b86ddabeac34b8ea63b9c2a281cb6276edb9b" gracePeriod=600 Jan 26 10:56:09 crc kubenswrapper[5003]: I0126 10:56:09.789164 5003 generic.go:334] "Generic (PLEG): container finished" podID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerID="4dd55168d07d12b4dda1e126f43b86ddabeac34b8ea63b9c2a281cb6276edb9b" exitCode=0 Jan 26 10:56:09 crc kubenswrapper[5003]: I0126 10:56:09.789208 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" event={"ID":"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd","Type":"ContainerDied","Data":"4dd55168d07d12b4dda1e126f43b86ddabeac34b8ea63b9c2a281cb6276edb9b"} Jan 26 10:56:09 crc kubenswrapper[5003]: I0126 10:56:09.789233 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" event={"ID":"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd","Type":"ContainerStarted","Data":"f3eab31ad2a64d16b429c7fff6c1ada069433f73eabf4567b3026431fe989a0c"} Jan 26 10:56:09 crc kubenswrapper[5003]: I0126 10:56:09.789271 5003 scope.go:117] "RemoveContainer" containerID="4c0aa82b43e2be72d561c0781bb825479f5b77be8b957270d8337126584ed98e" Jan 26 10:56:09 crc kubenswrapper[5003]: I0126 10:56:09.812149 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-index-djwjt" 
podStartSLOduration=6.155966261 podStartE2EDuration="6.812118574s" podCreationTimestamp="2026-01-26 10:56:03 +0000 UTC" firstStartedPulling="2026-01-26 10:56:04.138778558 +0000 UTC m=+779.680004119" lastFinishedPulling="2026-01-26 10:56:04.794930871 +0000 UTC m=+780.336156432" observedRunningTime="2026-01-26 10:56:05.775884088 +0000 UTC m=+781.317109689" watchObservedRunningTime="2026-01-26 10:56:09.812118574 +0000 UTC m=+785.353344175" Jan 26 10:56:13 crc kubenswrapper[5003]: I0126 10:56:13.710951 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/infra-operator-index-djwjt" Jan 26 10:56:13 crc kubenswrapper[5003]: I0126 10:56:13.711253 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-index-djwjt" Jan 26 10:56:13 crc kubenswrapper[5003]: I0126 10:56:13.737853 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/infra-operator-index-djwjt" Jan 26 10:56:13 crc kubenswrapper[5003]: I0126 10:56:13.839716 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-index-djwjt" Jan 26 10:56:15 crc kubenswrapper[5003]: I0126 10:56:15.415922 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ab4cebf9c8e9911cdf6a66ff2b7d90dca88985d852ea4187b325e8f162mxn9j"] Jan 26 10:56:15 crc kubenswrapper[5003]: I0126 10:56:15.417509 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ab4cebf9c8e9911cdf6a66ff2b7d90dca88985d852ea4187b325e8f162mxn9j" Jan 26 10:56:15 crc kubenswrapper[5003]: I0126 10:56:15.421170 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-x8f6q" Jan 26 10:56:15 crc kubenswrapper[5003]: I0126 10:56:15.423870 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ab4cebf9c8e9911cdf6a66ff2b7d90dca88985d852ea4187b325e8f162mxn9j"] Jan 26 10:56:15 crc kubenswrapper[5003]: I0126 10:56:15.440185 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5prpz\" (UniqueName: \"kubernetes.io/projected/fc780c04-9290-4f09-bcca-8777b3713c86-kube-api-access-5prpz\") pod \"ab4cebf9c8e9911cdf6a66ff2b7d90dca88985d852ea4187b325e8f162mxn9j\" (UID: \"fc780c04-9290-4f09-bcca-8777b3713c86\") " pod="openstack-operators/ab4cebf9c8e9911cdf6a66ff2b7d90dca88985d852ea4187b325e8f162mxn9j" Jan 26 10:56:15 crc kubenswrapper[5003]: I0126 10:56:15.440381 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fc780c04-9290-4f09-bcca-8777b3713c86-util\") pod \"ab4cebf9c8e9911cdf6a66ff2b7d90dca88985d852ea4187b325e8f162mxn9j\" (UID: \"fc780c04-9290-4f09-bcca-8777b3713c86\") " pod="openstack-operators/ab4cebf9c8e9911cdf6a66ff2b7d90dca88985d852ea4187b325e8f162mxn9j" Jan 26 10:56:15 crc kubenswrapper[5003]: I0126 10:56:15.440469 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fc780c04-9290-4f09-bcca-8777b3713c86-bundle\") pod \"ab4cebf9c8e9911cdf6a66ff2b7d90dca88985d852ea4187b325e8f162mxn9j\" (UID: \"fc780c04-9290-4f09-bcca-8777b3713c86\") " pod="openstack-operators/ab4cebf9c8e9911cdf6a66ff2b7d90dca88985d852ea4187b325e8f162mxn9j" Jan 26 10:56:15 crc kubenswrapper[5003]: I0126 10:56:15.542062 5003 
Jan 26 10:56:15 crc kubenswrapper[5003]: I0126 10:56:15.542184 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fc780c04-9290-4f09-bcca-8777b3713c86-util\") pod \"ab4cebf9c8e9911cdf6a66ff2b7d90dca88985d852ea4187b325e8f162mxn9j\" (UID: \"fc780c04-9290-4f09-bcca-8777b3713c86\") " pod="openstack-operators/ab4cebf9c8e9911cdf6a66ff2b7d90dca88985d852ea4187b325e8f162mxn9j"
Jan 26 10:56:15 crc kubenswrapper[5003]: I0126 10:56:15.542233 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fc780c04-9290-4f09-bcca-8777b3713c86-bundle\") pod \"ab4cebf9c8e9911cdf6a66ff2b7d90dca88985d852ea4187b325e8f162mxn9j\" (UID: \"fc780c04-9290-4f09-bcca-8777b3713c86\") " pod="openstack-operators/ab4cebf9c8e9911cdf6a66ff2b7d90dca88985d852ea4187b325e8f162mxn9j"
Jan 26 10:56:15 crc kubenswrapper[5003]: I0126 10:56:15.543133 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fc780c04-9290-4f09-bcca-8777b3713c86-util\") pod \"ab4cebf9c8e9911cdf6a66ff2b7d90dca88985d852ea4187b325e8f162mxn9j\" (UID: \"fc780c04-9290-4f09-bcca-8777b3713c86\") " pod="openstack-operators/ab4cebf9c8e9911cdf6a66ff2b7d90dca88985d852ea4187b325e8f162mxn9j"
Jan 26 10:56:15 crc kubenswrapper[5003]: I0126 10:56:15.543141 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fc780c04-9290-4f09-bcca-8777b3713c86-bundle\") pod \"ab4cebf9c8e9911cdf6a66ff2b7d90dca88985d852ea4187b325e8f162mxn9j\" (UID: \"fc780c04-9290-4f09-bcca-8777b3713c86\") " pod="openstack-operators/ab4cebf9c8e9911cdf6a66ff2b7d90dca88985d852ea4187b325e8f162mxn9j"
Jan 26 10:56:15 crc kubenswrapper[5003]: I0126 10:56:15.571157 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5prpz\" (UniqueName: \"kubernetes.io/projected/fc780c04-9290-4f09-bcca-8777b3713c86-kube-api-access-5prpz\") pod \"ab4cebf9c8e9911cdf6a66ff2b7d90dca88985d852ea4187b325e8f162mxn9j\" (UID: \"fc780c04-9290-4f09-bcca-8777b3713c86\") " pod="openstack-operators/ab4cebf9c8e9911cdf6a66ff2b7d90dca88985d852ea4187b325e8f162mxn9j"
Jan 26 10:56:15 crc kubenswrapper[5003]: I0126 10:56:15.737371 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ab4cebf9c8e9911cdf6a66ff2b7d90dca88985d852ea4187b325e8f162mxn9j"
Jan 26 10:56:16 crc kubenswrapper[5003]: I0126 10:56:16.152672 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ab4cebf9c8e9911cdf6a66ff2b7d90dca88985d852ea4187b325e8f162mxn9j"]
Jan 26 10:56:16 crc kubenswrapper[5003]: I0126 10:56:16.838910 5003 generic.go:334] "Generic (PLEG): container finished" podID="fc780c04-9290-4f09-bcca-8777b3713c86" containerID="beed7503f1f5dfa9ee2d0fbea0fd91709abfe7b9e292ac30f92241f0e12e327f" exitCode=0
Jan 26 10:56:16 crc kubenswrapper[5003]: I0126 10:56:16.838975 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ab4cebf9c8e9911cdf6a66ff2b7d90dca88985d852ea4187b325e8f162mxn9j" event={"ID":"fc780c04-9290-4f09-bcca-8777b3713c86","Type":"ContainerDied","Data":"beed7503f1f5dfa9ee2d0fbea0fd91709abfe7b9e292ac30f92241f0e12e327f"}
Jan 26 10:56:16 crc kubenswrapper[5003]: I0126 10:56:16.839006 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ab4cebf9c8e9911cdf6a66ff2b7d90dca88985d852ea4187b325e8f162mxn9j" event={"ID":"fc780c04-9290-4f09-bcca-8777b3713c86","Type":"ContainerStarted","Data":"df32a14a832549b157c84b2a6b2154204bd6c33303ce70bf787eeda0e20a166b"}
Jan 26 10:56:18 crc kubenswrapper[5003]: I0126 10:56:18.853256 5003 generic.go:334] "Generic (PLEG): container finished" podID="fc780c04-9290-4f09-bcca-8777b3713c86" containerID="3ff0af062e7fe30a5bd7967c45880a0510d64035e54c03f0f47f34ec58a93842" exitCode=0
Jan 26 10:56:18 crc kubenswrapper[5003]: I0126 10:56:18.853318 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ab4cebf9c8e9911cdf6a66ff2b7d90dca88985d852ea4187b325e8f162mxn9j" event={"ID":"fc780c04-9290-4f09-bcca-8777b3713c86","Type":"ContainerDied","Data":"3ff0af062e7fe30a5bd7967c45880a0510d64035e54c03f0f47f34ec58a93842"}
Jan 26 10:56:19 crc kubenswrapper[5003]: I0126 10:56:19.859653 5003 generic.go:334] "Generic (PLEG): container finished" podID="fc780c04-9290-4f09-bcca-8777b3713c86" containerID="30c98d7026a10ccac11fbf49a5ac20f2cef4ed5e04d7647aa6e79e4d0237ac7a" exitCode=0
Jan 26 10:56:19 crc kubenswrapper[5003]: I0126 10:56:19.859709 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ab4cebf9c8e9911cdf6a66ff2b7d90dca88985d852ea4187b325e8f162mxn9j" event={"ID":"fc780c04-9290-4f09-bcca-8777b3713c86","Type":"ContainerDied","Data":"30c98d7026a10ccac11fbf49a5ac20f2cef4ed5e04d7647aa6e79e4d0237ac7a"}
Jan 26 10:56:21 crc kubenswrapper[5003]: I0126 10:56:21.189582 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ab4cebf9c8e9911cdf6a66ff2b7d90dca88985d852ea4187b325e8f162mxn9j"
Jan 26 10:56:21 crc kubenswrapper[5003]: I0126 10:56:21.315580 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5prpz\" (UniqueName: \"kubernetes.io/projected/fc780c04-9290-4f09-bcca-8777b3713c86-kube-api-access-5prpz\") pod \"fc780c04-9290-4f09-bcca-8777b3713c86\" (UID: \"fc780c04-9290-4f09-bcca-8777b3713c86\") "
Jan 26 10:56:21 crc kubenswrapper[5003]: I0126 10:56:21.315699 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fc780c04-9290-4f09-bcca-8777b3713c86-util\") pod \"fc780c04-9290-4f09-bcca-8777b3713c86\" (UID: \"fc780c04-9290-4f09-bcca-8777b3713c86\") "
Jan 26 10:56:21 crc kubenswrapper[5003]: I0126 10:56:21.315722 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fc780c04-9290-4f09-bcca-8777b3713c86-bundle\") pod \"fc780c04-9290-4f09-bcca-8777b3713c86\" (UID: \"fc780c04-9290-4f09-bcca-8777b3713c86\") "
Jan 26 10:56:21 crc kubenswrapper[5003]: I0126 10:56:21.318095 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc780c04-9290-4f09-bcca-8777b3713c86-bundle" (OuterVolumeSpecName: "bundle") pod "fc780c04-9290-4f09-bcca-8777b3713c86" (UID: "fc780c04-9290-4f09-bcca-8777b3713c86"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 10:56:21 crc kubenswrapper[5003]: I0126 10:56:21.321473 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc780c04-9290-4f09-bcca-8777b3713c86-kube-api-access-5prpz" (OuterVolumeSpecName: "kube-api-access-5prpz") pod "fc780c04-9290-4f09-bcca-8777b3713c86" (UID: "fc780c04-9290-4f09-bcca-8777b3713c86"). InnerVolumeSpecName "kube-api-access-5prpz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 10:56:21 crc kubenswrapper[5003]: I0126 10:56:21.337950 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc780c04-9290-4f09-bcca-8777b3713c86-util" (OuterVolumeSpecName: "util") pod "fc780c04-9290-4f09-bcca-8777b3713c86" (UID: "fc780c04-9290-4f09-bcca-8777b3713c86"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 10:56:21 crc kubenswrapper[5003]: I0126 10:56:21.416733 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5prpz\" (UniqueName: \"kubernetes.io/projected/fc780c04-9290-4f09-bcca-8777b3713c86-kube-api-access-5prpz\") on node \"crc\" DevicePath \"\""
Jan 26 10:56:21 crc kubenswrapper[5003]: I0126 10:56:21.416775 5003 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fc780c04-9290-4f09-bcca-8777b3713c86-util\") on node \"crc\" DevicePath \"\""
Jan 26 10:56:21 crc kubenswrapper[5003]: I0126 10:56:21.416789 5003 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fc780c04-9290-4f09-bcca-8777b3713c86-bundle\") on node \"crc\" DevicePath \"\""
Jan 26 10:56:21 crc kubenswrapper[5003]: I0126 10:56:21.874128 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ab4cebf9c8e9911cdf6a66ff2b7d90dca88985d852ea4187b325e8f162mxn9j" event={"ID":"fc780c04-9290-4f09-bcca-8777b3713c86","Type":"ContainerDied","Data":"df32a14a832549b157c84b2a6b2154204bd6c33303ce70bf787eeda0e20a166b"}
Jan 26 10:56:21 crc kubenswrapper[5003]: I0126 10:56:21.874180 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="df32a14a832549b157c84b2a6b2154204bd6c33303ce70bf787eeda0e20a166b"
Jan 26 10:56:21 crc kubenswrapper[5003]: I0126 10:56:21.874189 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ab4cebf9c8e9911cdf6a66ff2b7d90dca88985d852ea4187b325e8f162mxn9j"
Jan 26 10:56:28 crc kubenswrapper[5003]: I0126 10:56:28.823945 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/openstack-galera-0"]
Jan 26 10:56:28 crc kubenswrapper[5003]: E0126 10:56:28.824601 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc780c04-9290-4f09-bcca-8777b3713c86" containerName="extract"
Jan 26 10:56:28 crc kubenswrapper[5003]: I0126 10:56:28.824612 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc780c04-9290-4f09-bcca-8777b3713c86" containerName="extract"
Jan 26 10:56:28 crc kubenswrapper[5003]: E0126 10:56:28.824627 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc780c04-9290-4f09-bcca-8777b3713c86" containerName="util"
Jan 26 10:56:28 crc kubenswrapper[5003]: I0126 10:56:28.824633 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc780c04-9290-4f09-bcca-8777b3713c86" containerName="util"
Jan 26 10:56:28 crc kubenswrapper[5003]: E0126 10:56:28.824648 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc780c04-9290-4f09-bcca-8777b3713c86" containerName="pull"
Jan 26 10:56:28 crc kubenswrapper[5003]: I0126 10:56:28.824654 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc780c04-9290-4f09-bcca-8777b3713c86" containerName="pull"
Jan 26 10:56:28 crc kubenswrapper[5003]: I0126 10:56:28.824757 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc780c04-9290-4f09-bcca-8777b3713c86" containerName="extract"
Jan 26 10:56:28 crc kubenswrapper[5003]: I0126 10:56:28.825314 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/openstack-galera-0"
Jan 26 10:56:28 crc kubenswrapper[5003]: I0126 10:56:28.827389 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"openshift-service-ca.crt"
Jan 26 10:56:28 crc kubenswrapper[5003]: I0126 10:56:28.827428 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"openstack-scripts"
Jan 26 10:56:28 crc kubenswrapper[5003]: I0126 10:56:28.829132 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"openstack-config-data"
Jan 26 10:56:28 crc kubenswrapper[5003]: I0126 10:56:28.829307 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"galera-openstack-dockercfg-kkmfn"
Jan 26 10:56:28 crc kubenswrapper[5003]: I0126 10:56:28.835673 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/openstack-galera-2"]
Jan 26 10:56:28 crc kubenswrapper[5003]: I0126 10:56:28.836917 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/openstack-galera-2"
Jan 26 10:56:28 crc kubenswrapper[5003]: I0126 10:56:28.841552 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"kube-root-ca.crt"
Jan 26 10:56:28 crc kubenswrapper[5003]: I0126 10:56:28.843915 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/openstack-galera-0"]
Jan 26 10:56:28 crc kubenswrapper[5003]: I0126 10:56:28.851908 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/openstack-galera-1"]
Jan 26 10:56:28 crc kubenswrapper[5003]: I0126 10:56:28.853368 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/openstack-galera-1"
Jan 26 10:56:28 crc kubenswrapper[5003]: I0126 10:56:28.856738 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/openstack-galera-2"]
Jan 26 10:56:28 crc kubenswrapper[5003]: I0126 10:56:28.862052 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/openstack-galera-1"]
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.010733 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"5d8bd836-ef6c-425f-b570-69c53560c715\") " pod="swift-kuttl-tests/openstack-galera-0"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.010769 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/bd511f4f-c18a-4f7c-8fb9-1d760a3039ac-config-data-default\") pod \"openstack-galera-2\" (UID: \"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac\") " pod="swift-kuttl-tests/openstack-galera-2"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.010794 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bd511f4f-c18a-4f7c-8fb9-1d760a3039ac-operator-scripts\") pod \"openstack-galera-2\" (UID: \"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac\") " pod="swift-kuttl-tests/openstack-galera-2"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.010830 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d62gq\" (UniqueName: \"kubernetes.io/projected/5d8bd836-ef6c-425f-b570-69c53560c715-kube-api-access-d62gq\") pod \"openstack-galera-0\" (UID: \"5d8bd836-ef6c-425f-b570-69c53560c715\") " pod="swift-kuttl-tests/openstack-galera-0"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.010889 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/261ebec9-25ad-4434-bf06-3feeee0f0eff-config-data-generated\") pod \"openstack-galera-1\" (UID: \"261ebec9-25ad-4434-bf06-3feeee0f0eff\") " pod="swift-kuttl-tests/openstack-galera-1"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.010915 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5d8bd836-ef6c-425f-b570-69c53560c715-kolla-config\") pod \"openstack-galera-0\" (UID: \"5d8bd836-ef6c-425f-b570-69c53560c715\") " pod="swift-kuttl-tests/openstack-galera-0"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.010932 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-2\" (UID: \"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac\") " pod="swift-kuttl-tests/openstack-galera-2"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.010958 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/261ebec9-25ad-4434-bf06-3feeee0f0eff-operator-scripts\") pod \"openstack-galera-1\" (UID: \"261ebec9-25ad-4434-bf06-3feeee0f0eff\") " pod="swift-kuttl-tests/openstack-galera-1"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.010980 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/bd511f4f-c18a-4f7c-8fb9-1d760a3039ac-config-data-generated\") pod \"openstack-galera-2\" (UID: \"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac\") " pod="swift-kuttl-tests/openstack-galera-2"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.010999 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dcxzx\" (UniqueName: \"kubernetes.io/projected/261ebec9-25ad-4434-bf06-3feeee0f0eff-kube-api-access-dcxzx\") pod \"openstack-galera-1\" (UID: \"261ebec9-25ad-4434-bf06-3feeee0f0eff\") " pod="swift-kuttl-tests/openstack-galera-1"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.011019 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/bd511f4f-c18a-4f7c-8fb9-1d760a3039ac-kolla-config\") pod \"openstack-galera-2\" (UID: \"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac\") " pod="swift-kuttl-tests/openstack-galera-2"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.011041 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5d8bd836-ef6c-425f-b570-69c53560c715-operator-scripts\") pod \"openstack-galera-0\" (UID: \"5d8bd836-ef6c-425f-b570-69c53560c715\") " pod="swift-kuttl-tests/openstack-galera-0"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.011067 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5d8bd836-ef6c-425f-b570-69c53560c715-config-data-generated\") pod \"openstack-galera-0\" (UID: \"5d8bd836-ef6c-425f-b570-69c53560c715\") " pod="swift-kuttl-tests/openstack-galera-0"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.011127 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5d8bd836-ef6c-425f-b570-69c53560c715-config-data-default\") pod \"openstack-galera-0\" (UID: \"5d8bd836-ef6c-425f-b570-69c53560c715\") " pod="swift-kuttl-tests/openstack-galera-0"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.011157 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4khmx\" (UniqueName: \"kubernetes.io/projected/bd511f4f-c18a-4f7c-8fb9-1d760a3039ac-kube-api-access-4khmx\") pod \"openstack-galera-2\" (UID: \"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac\") " pod="swift-kuttl-tests/openstack-galera-2"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.011250 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/261ebec9-25ad-4434-bf06-3feeee0f0eff-kolla-config\") pod \"openstack-galera-1\" (UID: \"261ebec9-25ad-4434-bf06-3feeee0f0eff\") " pod="swift-kuttl-tests/openstack-galera-1"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.011334 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-1\" (UID: \"261ebec9-25ad-4434-bf06-3feeee0f0eff\") " pod="swift-kuttl-tests/openstack-galera-1"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.011358 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/261ebec9-25ad-4434-bf06-3feeee0f0eff-config-data-default\") pod \"openstack-galera-1\" (UID: \"261ebec9-25ad-4434-bf06-3feeee0f0eff\") " pod="swift-kuttl-tests/openstack-galera-1"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.112094 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"5d8bd836-ef6c-425f-b570-69c53560c715\") " pod="swift-kuttl-tests/openstack-galera-0"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.112136 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/bd511f4f-c18a-4f7c-8fb9-1d760a3039ac-config-data-default\") pod \"openstack-galera-2\" (UID: \"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac\") " pod="swift-kuttl-tests/openstack-galera-2"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.112184 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bd511f4f-c18a-4f7c-8fb9-1d760a3039ac-operator-scripts\") pod \"openstack-galera-2\" (UID: \"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac\") " pod="swift-kuttl-tests/openstack-galera-2"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.112238 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d62gq\" (UniqueName: \"kubernetes.io/projected/5d8bd836-ef6c-425f-b570-69c53560c715-kube-api-access-d62gq\") pod \"openstack-galera-0\" (UID: \"5d8bd836-ef6c-425f-b570-69c53560c715\") " pod="swift-kuttl-tests/openstack-galera-0"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.112267 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/261ebec9-25ad-4434-bf06-3feeee0f0eff-config-data-generated\") pod \"openstack-galera-1\" (UID: \"261ebec9-25ad-4434-bf06-3feeee0f0eff\") " pod="swift-kuttl-tests/openstack-galera-1"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.112329 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5d8bd836-ef6c-425f-b570-69c53560c715-kolla-config\") pod \"openstack-galera-0\" (UID: \"5d8bd836-ef6c-425f-b570-69c53560c715\") " pod="swift-kuttl-tests/openstack-galera-0"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.112363 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-2\" (UID: \"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac\") " pod="swift-kuttl-tests/openstack-galera-2"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.112397 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/261ebec9-25ad-4434-bf06-3feeee0f0eff-operator-scripts\") pod \"openstack-galera-1\" (UID: \"261ebec9-25ad-4434-bf06-3feeee0f0eff\") " pod="swift-kuttl-tests/openstack-galera-1"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.112415 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dcxzx\" (UniqueName: \"kubernetes.io/projected/261ebec9-25ad-4434-bf06-3feeee0f0eff-kube-api-access-dcxzx\") pod \"openstack-galera-1\" (UID: \"261ebec9-25ad-4434-bf06-3feeee0f0eff\") " pod="swift-kuttl-tests/openstack-galera-1"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.112431 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/bd511f4f-c18a-4f7c-8fb9-1d760a3039ac-config-data-generated\") pod \"openstack-galera-2\" (UID: \"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac\") " pod="swift-kuttl-tests/openstack-galera-2"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.112445 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/bd511f4f-c18a-4f7c-8fb9-1d760a3039ac-kolla-config\") pod \"openstack-galera-2\" (UID: \"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac\") " pod="swift-kuttl-tests/openstack-galera-2"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.112461 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5d8bd836-ef6c-425f-b570-69c53560c715-operator-scripts\") pod \"openstack-galera-0\" (UID: \"5d8bd836-ef6c-425f-b570-69c53560c715\") " pod="swift-kuttl-tests/openstack-galera-0"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.112494 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5d8bd836-ef6c-425f-b570-69c53560c715-config-data-generated\") pod \"openstack-galera-0\" (UID: \"5d8bd836-ef6c-425f-b570-69c53560c715\") " pod="swift-kuttl-tests/openstack-galera-0"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.112542 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5d8bd836-ef6c-425f-b570-69c53560c715-config-data-default\") pod \"openstack-galera-0\" (UID: \"5d8bd836-ef6c-425f-b570-69c53560c715\") " pod="swift-kuttl-tests/openstack-galera-0"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.112591 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4khmx\" (UniqueName: \"kubernetes.io/projected/bd511f4f-c18a-4f7c-8fb9-1d760a3039ac-kube-api-access-4khmx\") pod \"openstack-galera-2\" (UID: \"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac\") " pod="swift-kuttl-tests/openstack-galera-2"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.112621 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/261ebec9-25ad-4434-bf06-3feeee0f0eff-kolla-config\") pod \"openstack-galera-1\" (UID: \"261ebec9-25ad-4434-bf06-3feeee0f0eff\") " pod="swift-kuttl-tests/openstack-galera-1"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.112642 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-1\" (UID: \"261ebec9-25ad-4434-bf06-3feeee0f0eff\") " pod="swift-kuttl-tests/openstack-galera-1"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.112656 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/261ebec9-25ad-4434-bf06-3feeee0f0eff-config-data-default\") pod \"openstack-galera-1\" (UID: \"261ebec9-25ad-4434-bf06-3feeee0f0eff\") " pod="swift-kuttl-tests/openstack-galera-1"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.112652 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-2\" (UID: \"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac\") device mount path \"/mnt/openstack/pv04\"" pod="swift-kuttl-tests/openstack-galera-2"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.112539 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"5d8bd836-ef6c-425f-b570-69c53560c715\") device mount path \"/mnt/openstack/pv01\"" pod="swift-kuttl-tests/openstack-galera-0"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.113120 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-1\" (UID: \"261ebec9-25ad-4434-bf06-3feeee0f0eff\") device mount path \"/mnt/openstack/pv08\"" pod="swift-kuttl-tests/openstack-galera-1"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.113146 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/bd511f4f-c18a-4f7c-8fb9-1d760a3039ac-config-data-default\") pod \"openstack-galera-2\" (UID: \"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac\") " pod="swift-kuttl-tests/openstack-galera-2"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.113156 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5d8bd836-ef6c-425f-b570-69c53560c715-kolla-config\") pod \"openstack-galera-0\" (UID: \"5d8bd836-ef6c-425f-b570-69c53560c715\") " pod="swift-kuttl-tests/openstack-galera-0"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.113394 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/bd511f4f-c18a-4f7c-8fb9-1d760a3039ac-config-data-generated\") pod \"openstack-galera-2\" (UID: \"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac\") " pod="swift-kuttl-tests/openstack-galera-2"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.113593 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/261ebec9-25ad-4434-bf06-3feeee0f0eff-config-data-generated\") pod \"openstack-galera-1\" (UID: \"261ebec9-25ad-4434-bf06-3feeee0f0eff\") " pod="swift-kuttl-tests/openstack-galera-1"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.113750 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5d8bd836-ef6c-425f-b570-69c53560c715-config-data-generated\") pod \"openstack-galera-0\" (UID: \"5d8bd836-ef6c-425f-b570-69c53560c715\") " pod="swift-kuttl-tests/openstack-galera-0"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.114015 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5d8bd836-ef6c-425f-b570-69c53560c715-config-data-default\") pod \"openstack-galera-0\" (UID: \"5d8bd836-ef6c-425f-b570-69c53560c715\") " pod="swift-kuttl-tests/openstack-galera-0"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.114088 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/261ebec9-25ad-4434-bf06-3feeee0f0eff-kolla-config\") pod \"openstack-galera-1\" (UID: \"261ebec9-25ad-4434-bf06-3feeee0f0eff\") " pod="swift-kuttl-tests/openstack-galera-1"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.114204 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/bd511f4f-c18a-4f7c-8fb9-1d760a3039ac-kolla-config\") pod \"openstack-galera-2\" (UID: \"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac\") " pod="swift-kuttl-tests/openstack-galera-2"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.114262 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bd511f4f-c18a-4f7c-8fb9-1d760a3039ac-operator-scripts\") pod \"openstack-galera-2\" (UID: \"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac\") " pod="swift-kuttl-tests/openstack-galera-2"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.114518 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/261ebec9-25ad-4434-bf06-3feeee0f0eff-config-data-default\") pod \"openstack-galera-1\" (UID: \"261ebec9-25ad-4434-bf06-3feeee0f0eff\") " pod="swift-kuttl-tests/openstack-galera-1"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.114785 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/261ebec9-25ad-4434-bf06-3feeee0f0eff-operator-scripts\") pod \"openstack-galera-1\" (UID: \"261ebec9-25ad-4434-bf06-3feeee0f0eff\") " pod="swift-kuttl-tests/openstack-galera-1"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.115106 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5d8bd836-ef6c-425f-b570-69c53560c715-operator-scripts\") pod \"openstack-galera-0\" (UID: \"5d8bd836-ef6c-425f-b570-69c53560c715\") " pod="swift-kuttl-tests/openstack-galera-0"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.138077 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-2\" (UID: \"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac\") " pod="swift-kuttl-tests/openstack-galera-2"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.148154 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dcxzx\" (UniqueName: \"kubernetes.io/projected/261ebec9-25ad-4434-bf06-3feeee0f0eff-kube-api-access-dcxzx\") pod \"openstack-galera-1\" (UID: \"261ebec9-25ad-4434-bf06-3feeee0f0eff\") " pod="swift-kuttl-tests/openstack-galera-1"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.149253 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d62gq\" (UniqueName: \"kubernetes.io/projected/5d8bd836-ef6c-425f-b570-69c53560c715-kube-api-access-d62gq\") pod \"openstack-galera-0\" (UID: \"5d8bd836-ef6c-425f-b570-69c53560c715\") " pod="swift-kuttl-tests/openstack-galera-0"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.151847 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4khmx\" (UniqueName: \"kubernetes.io/projected/bd511f4f-c18a-4f7c-8fb9-1d760a3039ac-kube-api-access-4khmx\") pod \"openstack-galera-2\" (UID: \"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac\") " pod="swift-kuttl-tests/openstack-galera-2"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.158679 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/openstack-galera-2"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.160489 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"5d8bd836-ef6c-425f-b570-69c53560c715\") " pod="swift-kuttl-tests/openstack-galera-0"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.172594 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-1\" (UID: \"261ebec9-25ad-4434-bf06-3feeee0f0eff\") " pod="swift-kuttl-tests/openstack-galera-1"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.443610 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/openstack-galera-0"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.469268 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/openstack-galera-1"
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.642226 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/openstack-galera-2"]
Jan 26 10:56:29 crc kubenswrapper[5003]: W0126 10:56:29.651825 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbd511f4f_c18a_4f7c_8fb9_1d760a3039ac.slice/crio-d45a1ad40bdd7306108d530ec90168a01807bd506e2822c4c99f7d69f0882793 WatchSource:0}: Error finding container d45a1ad40bdd7306108d530ec90168a01807bd506e2822c4c99f7d69f0882793: Status 404 returned error can't find the container with id d45a1ad40bdd7306108d530ec90168a01807bd506e2822c4c99f7d69f0882793
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.773036 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/openstack-galera-1"]
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.874672 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/openstack-galera-0"]
Jan 26 10:56:29 crc kubenswrapper[5003]: W0126 10:56:29.878303 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5d8bd836_ef6c_425f_b570_69c53560c715.slice/crio-8c7875a2f296334cefa0a94ad01ba3d72b9e9ca552e67acbcc2cd2ff21230a9a WatchSource:0}: Error finding container 8c7875a2f296334cefa0a94ad01ba3d72b9e9ca552e67acbcc2cd2ff21230a9a: Status 404 returned error can't find the container with id 8c7875a2f296334cefa0a94ad01ba3d72b9e9ca552e67acbcc2cd2ff21230a9a
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.921172 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-2" event={"ID":"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac","Type":"ContainerStarted","Data":"d45a1ad40bdd7306108d530ec90168a01807bd506e2822c4c99f7d69f0882793"}
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.922106 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-1" event={"ID":"261ebec9-25ad-4434-bf06-3feeee0f0eff","Type":"ContainerStarted","Data":"663c64d0a7a356719ff3d3dc14f6da756a5ee6e383f90eb429faf70ae5011476"}
Jan 26 10:56:29 crc kubenswrapper[5003]: I0126 10:56:29.922881 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-0" event={"ID":"5d8bd836-ef6c-425f-b570-69c53560c715","Type":"ContainerStarted","Data":"8c7875a2f296334cefa0a94ad01ba3d72b9e9ca552e67acbcc2cd2ff21230a9a"}
Jan 26 10:56:30 crc kubenswrapper[5003]: I0126 10:56:30.632058 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-fc4cd6d84-shrbc"]
Jan 26 10:56:30 crc kubenswrapper[5003]: I0126 10:56:30.640573 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-fc4cd6d84-shrbc"
Jan 26 10:56:30 crc kubenswrapper[5003]: I0126 10:56:30.643313 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-r6gs2"
Jan 26 10:56:30 crc kubenswrapper[5003]: I0126 10:56:30.643576 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-service-cert"
Jan 26 10:56:30 crc kubenswrapper[5003]: I0126 10:56:30.643982 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-fc4cd6d84-shrbc"]
Jan 26 10:56:30 crc kubenswrapper[5003]: I0126 10:56:30.733743 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0460124e-d3ec-4069-ad36-914a93ef06cb-webhook-cert\") pod \"infra-operator-controller-manager-fc4cd6d84-shrbc\" (UID: \"0460124e-d3ec-4069-ad36-914a93ef06cb\") " pod="openstack-operators/infra-operator-controller-manager-fc4cd6d84-shrbc"
Jan 26 10:56:30 crc kubenswrapper[5003]: I0126 10:56:30.733796 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0460124e-d3ec-4069-ad36-914a93ef06cb-apiservice-cert\") pod \"infra-operator-controller-manager-fc4cd6d84-shrbc\" (UID: \"0460124e-d3ec-4069-ad36-914a93ef06cb\") " pod="openstack-operators/infra-operator-controller-manager-fc4cd6d84-shrbc"
Jan 26 10:56:30 crc kubenswrapper[5003]: I0126 10:56:30.733824 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gb67b\" (UniqueName: \"kubernetes.io/projected/0460124e-d3ec-4069-ad36-914a93ef06cb-kube-api-access-gb67b\") pod \"infra-operator-controller-manager-fc4cd6d84-shrbc\" (UID: \"0460124e-d3ec-4069-ad36-914a93ef06cb\") " pod="openstack-operators/infra-operator-controller-manager-fc4cd6d84-shrbc"
Jan 26 10:56:30 crc kubenswrapper[5003]: I0126 10:56:30.847905 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0460124e-d3ec-4069-ad36-914a93ef06cb-webhook-cert\") pod \"infra-operator-controller-manager-fc4cd6d84-shrbc\" (UID: \"0460124e-d3ec-4069-ad36-914a93ef06cb\") " pod="openstack-operators/infra-operator-controller-manager-fc4cd6d84-shrbc"
Jan 26 10:56:30 crc kubenswrapper[5003]: I0126 10:56:30.847973 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0460124e-d3ec-4069-ad36-914a93ef06cb-apiservice-cert\") pod \"infra-operator-controller-manager-fc4cd6d84-shrbc\" (UID: \"0460124e-d3ec-4069-ad36-914a93ef06cb\") " pod="openstack-operators/infra-operator-controller-manager-fc4cd6d84-shrbc"
Jan 26 10:56:30 crc kubenswrapper[5003]: I0126 10:56:30.847998 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gb67b\" (UniqueName: \"kubernetes.io/projected/0460124e-d3ec-4069-ad36-914a93ef06cb-kube-api-access-gb67b\") pod \"infra-operator-controller-manager-fc4cd6d84-shrbc\" (UID: \"0460124e-d3ec-4069-ad36-914a93ef06cb\") " pod="openstack-operators/infra-operator-controller-manager-fc4cd6d84-shrbc"
Jan 26 10:56:30 crc kubenswrapper[5003]: I0126 10:56:30.862305 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0460124e-d3ec-4069-ad36-914a93ef06cb-webhook-cert\") pod \"infra-operator-controller-manager-fc4cd6d84-shrbc\" (UID: \"0460124e-d3ec-4069-ad36-914a93ef06cb\") " pod="openstack-operators/infra-operator-controller-manager-fc4cd6d84-shrbc"
Jan 26 10:56:30 crc kubenswrapper[5003]: I0126 10:56:30.862997 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0460124e-d3ec-4069-ad36-914a93ef06cb-apiservice-cert\") pod \"infra-operator-controller-manager-fc4cd6d84-shrbc\" (UID: \"0460124e-d3ec-4069-ad36-914a93ef06cb\") " pod="openstack-operators/infra-operator-controller-manager-fc4cd6d84-shrbc"
Jan 26 10:56:30 crc kubenswrapper[5003]: I0126 10:56:30.871491 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gb67b\" (UniqueName: \"kubernetes.io/projected/0460124e-d3ec-4069-ad36-914a93ef06cb-kube-api-access-gb67b\") pod \"infra-operator-controller-manager-fc4cd6d84-shrbc\" (UID: \"0460124e-d3ec-4069-ad36-914a93ef06cb\") " pod="openstack-operators/infra-operator-controller-manager-fc4cd6d84-shrbc"
Jan 26 10:56:30 crc kubenswrapper[5003]: I0126 10:56:30.984027 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-fc4cd6d84-shrbc"
Jan 26 10:56:31 crc kubenswrapper[5003]: I0126 10:56:31.296511 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-fc4cd6d84-shrbc"]
Jan 26 10:56:31 crc kubenswrapper[5003]: I0126 10:56:31.972651 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-fc4cd6d84-shrbc" event={"ID":"0460124e-d3ec-4069-ad36-914a93ef06cb","Type":"ContainerStarted","Data":"94a58289de7c560c787497f43bde652356879ba1befae823514ac284be2ae08b"}
Jan 26 10:56:47 crc kubenswrapper[5003]: E0126 10:56:47.045902 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13"
Jan 26 10:56:47 crc kubenswrapper[5003]: E0126 10:56:47.046541 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-d62gq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-0_swift-kuttl-tests(5d8bd836-ef6c-425f-b570-69c53560c715): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 26 10:56:47 crc kubenswrapper[5003]: E0126 10:56:47.047742 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="swift-kuttl-tests/openstack-galera-0" podUID="5d8bd836-ef6c-425f-b570-69c53560c715"
Jan 26 10:56:47 crc kubenswrapper[5003]: I0126 10:56:47.082031 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-fc4cd6d84-shrbc" event={"ID":"0460124e-d3ec-4069-ad36-914a93ef06cb","Type":"ContainerStarted","Data":"996eca42ffab49405784bd7f8caa87abf6ebc6b281511933047e4ea9967e63dc"}
Jan 26 10:56:47 crc kubenswrapper[5003]: I0126 10:56:47.082168 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-fc4cd6d84-shrbc"
Jan 26 10:56:47 crc kubenswrapper[5003]: E0126 10:56:47.083760 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13\\\"\"" pod="swift-kuttl-tests/openstack-galera-0" podUID="5d8bd836-ef6c-425f-b570-69c53560c715"
Jan 26 10:56:47 crc kubenswrapper[5003]: I0126 10:56:47.107855 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-fc4cd6d84-shrbc" podStartSLOduration=1.652068412 podStartE2EDuration="17.10783347s" podCreationTimestamp="2026-01-26 10:56:30 +0000 UTC" firstStartedPulling="2026-01-26 10:56:31.312970362 +0000 UTC m=+806.854195923" lastFinishedPulling="2026-01-26 10:56:46.76873542 +0000 UTC m=+822.309960981" observedRunningTime="2026-01-26 10:56:47.10325864 +0000 UTC m=+822.644484201" watchObservedRunningTime="2026-01-26 10:56:47.10783347 +0000 UTC m=+822.649059031"
Jan 26 10:56:47 crc kubenswrapper[5003]: E0126 10:56:47.145316 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13"
Jan 26 10:56:47 crc kubenswrapper[5003]: E0126 10:56:47.145501 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4khmx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-2_swift-kuttl-tests(bd511f4f-c18a-4f7c-8fb9-1d760a3039ac): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 26 10:56:47 crc kubenswrapper[5003]: E0126 10:56:47.147089 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="swift-kuttl-tests/openstack-galera-2" podUID="bd511f4f-c18a-4f7c-8fb9-1d760a3039ac"
Jan 26 10:56:47 crc kubenswrapper[5003]: E0126 10:56:47.987270 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13"
Jan 26 10:56:47 crc kubenswrapper[5003]: E0126 10:56:47.988036 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dcxzx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-1_swift-kuttl-tests(261ebec9-25ad-4434-bf06-3feeee0f0eff): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 26 10:56:47 crc kubenswrapper[5003]: E0126 10:56:47.989409 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="swift-kuttl-tests/openstack-galera-1" podUID="261ebec9-25ad-4434-bf06-3feeee0f0eff"
Jan 26 10:56:48 crc kubenswrapper[5003]: E0126 10:56:48.090778 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13\\\"\"" pod="swift-kuttl-tests/openstack-galera-2" podUID="bd511f4f-c18a-4f7c-8fb9-1d760a3039ac"
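The three "Unhandled Error" dumps above are the kubelet printing the full Go struct literal of the mysql-bootstrap init container after its image pull was canceled mid-copy; each galera pod then cycles from ErrImagePull into ImagePullBackOff until the pull eventually succeeds (the ContainerStarted events at 10:57:04 further down). A hedged client-go sketch for surfacing that waiting state from the pod status; the namespace and pod names come from the log, the kubeconfig handling is an assumption:

```go
package main

// Hedged sketch: surfacing the ErrImagePull / ImagePullBackOff waiting
// reason recorded above. Namespace and pod names are from the log;
// everything else is an assumption, not the operator's tooling.

import (
	"context"
	"fmt"
	"os"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", os.Getenv("KUBECONFIG")) // assumption
	if err != nil {
		panic(err)
	}
	cs := kubernetes.NewForConfigOrDie(cfg)
	for _, name := range []string{"openstack-galera-0", "openstack-galera-1", "openstack-galera-2"} {
		pod, err := cs.CoreV1().Pods("swift-kuttl-tests").Get(context.TODO(), name, metav1.GetOptions{})
		if err != nil {
			panic(err)
		}
		for _, st := range pod.Status.InitContainerStatuses {
			if w := st.State.Waiting; w != nil {
				// e.g. mysql-bootstrap ImagePullBackOff "Back-off pulling image ..."
				fmt.Println(name, st.Name, w.Reason, w.Message)
			}
		}
	}
}
```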
\\\"quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13\\\"\"" pod="swift-kuttl-tests/openstack-galera-2" podUID="bd511f4f-c18a-4f7c-8fb9-1d760a3039ac" Jan 26 10:56:48 crc kubenswrapper[5003]: E0126 10:56:48.092879 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13\\\"\"" pod="swift-kuttl-tests/openstack-galera-1" podUID="261ebec9-25ad-4434-bf06-3feeee0f0eff" Jan 26 10:57:00 crc kubenswrapper[5003]: I0126 10:57:00.992637 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-fc4cd6d84-shrbc" Jan 26 10:57:02 crc kubenswrapper[5003]: I0126 10:57:02.166529 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/memcached-0"] Jan 26 10:57:02 crc kubenswrapper[5003]: I0126 10:57:02.167354 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/memcached-0" Jan 26 10:57:02 crc kubenswrapper[5003]: I0126 10:57:02.171207 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"memcached-config-data" Jan 26 10:57:02 crc kubenswrapper[5003]: I0126 10:57:02.171214 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"memcached-memcached-dockercfg-sflnl" Jan 26 10:57:02 crc kubenswrapper[5003]: I0126 10:57:02.191903 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/memcached-0"] Jan 26 10:57:02 crc kubenswrapper[5003]: I0126 10:57:02.235566 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7210ca64-60f9-4e11-bd2c-6e4905b0b948-kolla-config\") pod \"memcached-0\" (UID: \"7210ca64-60f9-4e11-bd2c-6e4905b0b948\") " pod="swift-kuttl-tests/memcached-0" Jan 26 10:57:02 crc kubenswrapper[5003]: I0126 10:57:02.235632 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6s5n\" (UniqueName: \"kubernetes.io/projected/7210ca64-60f9-4e11-bd2c-6e4905b0b948-kube-api-access-z6s5n\") pod \"memcached-0\" (UID: \"7210ca64-60f9-4e11-bd2c-6e4905b0b948\") " pod="swift-kuttl-tests/memcached-0" Jan 26 10:57:02 crc kubenswrapper[5003]: I0126 10:57:02.235661 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7210ca64-60f9-4e11-bd2c-6e4905b0b948-config-data\") pod \"memcached-0\" (UID: \"7210ca64-60f9-4e11-bd2c-6e4905b0b948\") " pod="swift-kuttl-tests/memcached-0" Jan 26 10:57:02 crc kubenswrapper[5003]: I0126 10:57:02.336626 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7210ca64-60f9-4e11-bd2c-6e4905b0b948-kolla-config\") pod \"memcached-0\" (UID: \"7210ca64-60f9-4e11-bd2c-6e4905b0b948\") " pod="swift-kuttl-tests/memcached-0" Jan 26 10:57:02 crc kubenswrapper[5003]: I0126 10:57:02.336718 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6s5n\" (UniqueName: \"kubernetes.io/projected/7210ca64-60f9-4e11-bd2c-6e4905b0b948-kube-api-access-z6s5n\") pod \"memcached-0\" (UID: 
\"7210ca64-60f9-4e11-bd2c-6e4905b0b948\") " pod="swift-kuttl-tests/memcached-0" Jan 26 10:57:02 crc kubenswrapper[5003]: I0126 10:57:02.336750 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7210ca64-60f9-4e11-bd2c-6e4905b0b948-config-data\") pod \"memcached-0\" (UID: \"7210ca64-60f9-4e11-bd2c-6e4905b0b948\") " pod="swift-kuttl-tests/memcached-0" Jan 26 10:57:02 crc kubenswrapper[5003]: I0126 10:57:02.337623 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7210ca64-60f9-4e11-bd2c-6e4905b0b948-kolla-config\") pod \"memcached-0\" (UID: \"7210ca64-60f9-4e11-bd2c-6e4905b0b948\") " pod="swift-kuttl-tests/memcached-0" Jan 26 10:57:02 crc kubenswrapper[5003]: I0126 10:57:02.337720 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7210ca64-60f9-4e11-bd2c-6e4905b0b948-config-data\") pod \"memcached-0\" (UID: \"7210ca64-60f9-4e11-bd2c-6e4905b0b948\") " pod="swift-kuttl-tests/memcached-0" Jan 26 10:57:02 crc kubenswrapper[5003]: I0126 10:57:02.357233 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6s5n\" (UniqueName: \"kubernetes.io/projected/7210ca64-60f9-4e11-bd2c-6e4905b0b948-kube-api-access-z6s5n\") pod \"memcached-0\" (UID: \"7210ca64-60f9-4e11-bd2c-6e4905b0b948\") " pod="swift-kuttl-tests/memcached-0" Jan 26 10:57:02 crc kubenswrapper[5003]: I0126 10:57:02.490591 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/memcached-0" Jan 26 10:57:03 crc kubenswrapper[5003]: I0126 10:57:03.646934 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/memcached-0"] Jan 26 10:57:04 crc kubenswrapper[5003]: I0126 10:57:04.185687 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-0" event={"ID":"5d8bd836-ef6c-425f-b570-69c53560c715","Type":"ContainerStarted","Data":"694f7f169e16bca9cd4cb5133fadcca45f8dcb20454633bf64bbbe3c7b646022"} Jan 26 10:57:04 crc kubenswrapper[5003]: I0126 10:57:04.188365 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-2" event={"ID":"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac","Type":"ContainerStarted","Data":"16abeed0f2f6ba7519e44cf47f900612abcceb0e00e58ec45ce8510f38a266fd"} Jan 26 10:57:04 crc kubenswrapper[5003]: I0126 10:57:04.190053 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-1" event={"ID":"261ebec9-25ad-4434-bf06-3feeee0f0eff","Type":"ContainerStarted","Data":"a8e81a795fb5013a8878effc7c783820cdee610ff4de641745e9755bd2139ff2"} Jan 26 10:57:04 crc kubenswrapper[5003]: I0126 10:57:04.191145 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/memcached-0" event={"ID":"7210ca64-60f9-4e11-bd2c-6e4905b0b948","Type":"ContainerStarted","Data":"41f340c59885c338f2aa1019cff82a5b2d20b97ca2b10935fd0ce3c710bd26fb"} Jan 26 10:57:04 crc kubenswrapper[5003]: I0126 10:57:04.895776 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-w2mgm"] Jan 26 10:57:04 crc kubenswrapper[5003]: I0126 10:57:04.898074 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-w2mgm" Jan 26 10:57:04 crc kubenswrapper[5003]: I0126 10:57:04.899936 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-index-dockercfg-8pt9s" Jan 26 10:57:04 crc kubenswrapper[5003]: I0126 10:57:04.904982 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-w2mgm"] Jan 26 10:57:04 crc kubenswrapper[5003]: I0126 10:57:04.976273 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxcfs\" (UniqueName: \"kubernetes.io/projected/6e1b026a-48c5-4258-af65-65b701f38e26-kube-api-access-vxcfs\") pod \"rabbitmq-cluster-operator-index-w2mgm\" (UID: \"6e1b026a-48c5-4258-af65-65b701f38e26\") " pod="openstack-operators/rabbitmq-cluster-operator-index-w2mgm" Jan 26 10:57:05 crc kubenswrapper[5003]: I0126 10:57:05.077421 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxcfs\" (UniqueName: \"kubernetes.io/projected/6e1b026a-48c5-4258-af65-65b701f38e26-kube-api-access-vxcfs\") pod \"rabbitmq-cluster-operator-index-w2mgm\" (UID: \"6e1b026a-48c5-4258-af65-65b701f38e26\") " pod="openstack-operators/rabbitmq-cluster-operator-index-w2mgm" Jan 26 10:57:05 crc kubenswrapper[5003]: I0126 10:57:05.101346 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxcfs\" (UniqueName: \"kubernetes.io/projected/6e1b026a-48c5-4258-af65-65b701f38e26-kube-api-access-vxcfs\") pod \"rabbitmq-cluster-operator-index-w2mgm\" (UID: \"6e1b026a-48c5-4258-af65-65b701f38e26\") " pod="openstack-operators/rabbitmq-cluster-operator-index-w2mgm" Jan 26 10:57:05 crc kubenswrapper[5003]: I0126 10:57:05.222110 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-index-dockercfg-8pt9s" Jan 26 10:57:05 crc kubenswrapper[5003]: I0126 10:57:05.230645 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-w2mgm" Jan 26 10:57:06 crc kubenswrapper[5003]: I0126 10:57:06.144495 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-w2mgm"] Jan 26 10:57:06 crc kubenswrapper[5003]: W0126 10:57:06.154503 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6e1b026a_48c5_4258_af65_65b701f38e26.slice/crio-bc6245ddc0de40dc1e935cb4eed024d9c6776a2c097519af568b210020d8a7bb WatchSource:0}: Error finding container bc6245ddc0de40dc1e935cb4eed024d9c6776a2c097519af568b210020d8a7bb: Status 404 returned error can't find the container with id bc6245ddc0de40dc1e935cb4eed024d9c6776a2c097519af568b210020d8a7bb Jan 26 10:57:06 crc kubenswrapper[5003]: I0126 10:57:06.202526 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-w2mgm" event={"ID":"6e1b026a-48c5-4258-af65-65b701f38e26","Type":"ContainerStarted","Data":"bc6245ddc0de40dc1e935cb4eed024d9c6776a2c097519af568b210020d8a7bb"} Jan 26 10:57:06 crc kubenswrapper[5003]: I0126 10:57:06.203886 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/memcached-0" event={"ID":"7210ca64-60f9-4e11-bd2c-6e4905b0b948","Type":"ContainerStarted","Data":"9d0ea22f8d5cbef2b6ac5abbfaa6920191e268f9920611f04794c7edaa2d3ed9"} Jan 26 10:57:06 crc kubenswrapper[5003]: I0126 10:57:06.204067 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/memcached-0" Jan 26 10:57:06 crc kubenswrapper[5003]: I0126 10:57:06.221215 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/memcached-0" podStartSLOduration=2.099485169 podStartE2EDuration="4.221193484s" podCreationTimestamp="2026-01-26 10:57:02 +0000 UTC" firstStartedPulling="2026-01-26 10:57:03.65733799 +0000 UTC m=+839.198563551" lastFinishedPulling="2026-01-26 10:57:05.779046305 +0000 UTC m=+841.320271866" observedRunningTime="2026-01-26 10:57:06.218548339 +0000 UTC m=+841.759773900" watchObservedRunningTime="2026-01-26 10:57:06.221193484 +0000 UTC m=+841.762419045" Jan 26 10:57:08 crc kubenswrapper[5003]: I0126 10:57:08.220663 5003 generic.go:334] "Generic (PLEG): container finished" podID="5d8bd836-ef6c-425f-b570-69c53560c715" containerID="694f7f169e16bca9cd4cb5133fadcca45f8dcb20454633bf64bbbe3c7b646022" exitCode=0 Jan 26 10:57:08 crc kubenswrapper[5003]: I0126 10:57:08.220821 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-0" event={"ID":"5d8bd836-ef6c-425f-b570-69c53560c715","Type":"ContainerDied","Data":"694f7f169e16bca9cd4cb5133fadcca45f8dcb20454633bf64bbbe3c7b646022"} Jan 26 10:57:08 crc kubenswrapper[5003]: I0126 10:57:08.229182 5003 generic.go:334] "Generic (PLEG): container finished" podID="bd511f4f-c18a-4f7c-8fb9-1d760a3039ac" containerID="16abeed0f2f6ba7519e44cf47f900612abcceb0e00e58ec45ce8510f38a266fd" exitCode=0 Jan 26 10:57:08 crc kubenswrapper[5003]: I0126 10:57:08.229266 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-2" event={"ID":"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac","Type":"ContainerDied","Data":"16abeed0f2f6ba7519e44cf47f900612abcceb0e00e58ec45ce8510f38a266fd"} Jan 26 10:57:08 crc kubenswrapper[5003]: I0126 10:57:08.231686 5003 generic.go:334] "Generic (PLEG): container finished" podID="261ebec9-25ad-4434-bf06-3feeee0f0eff" 
containerID="a8e81a795fb5013a8878effc7c783820cdee610ff4de641745e9755bd2139ff2" exitCode=0 Jan 26 10:57:08 crc kubenswrapper[5003]: I0126 10:57:08.231717 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-1" event={"ID":"261ebec9-25ad-4434-bf06-3feeee0f0eff","Type":"ContainerDied","Data":"a8e81a795fb5013a8878effc7c783820cdee610ff4de641745e9755bd2139ff2"} Jan 26 10:57:09 crc kubenswrapper[5003]: I0126 10:57:09.238732 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-1" event={"ID":"261ebec9-25ad-4434-bf06-3feeee0f0eff","Type":"ContainerStarted","Data":"cedc05e74c7904ed0bb5572705f2be47c553af9e6652d1bda63282ec2c7dc646"} Jan 26 10:57:09 crc kubenswrapper[5003]: I0126 10:57:09.243564 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-0" event={"ID":"5d8bd836-ef6c-425f-b570-69c53560c715","Type":"ContainerStarted","Data":"5e7e64a851577b3501e476f5005e20e08f836912695da3d57df69b6202080eeb"} Jan 26 10:57:09 crc kubenswrapper[5003]: I0126 10:57:09.246368 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-2" event={"ID":"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac","Type":"ContainerStarted","Data":"e1648b06fce8b827b74330e6ce165933a3bd587e4a42f925c774a3562cbe625f"} Jan 26 10:57:09 crc kubenswrapper[5003]: I0126 10:57:09.248609 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-w2mgm" event={"ID":"6e1b026a-48c5-4258-af65-65b701f38e26","Type":"ContainerStarted","Data":"be62d9d0ccbc05949b24b7f729270adfc0164850bf36ebe258cd79e3d34cbf17"} Jan 26 10:57:09 crc kubenswrapper[5003]: I0126 10:57:09.262351 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/openstack-galera-1" podStartSLOduration=-9223371994.592447 podStartE2EDuration="42.262329298s" podCreationTimestamp="2026-01-26 10:56:27 +0000 UTC" firstStartedPulling="2026-01-26 10:56:29.782127345 +0000 UTC m=+805.323352906" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:57:09.258245252 +0000 UTC m=+844.799470833" watchObservedRunningTime="2026-01-26 10:57:09.262329298 +0000 UTC m=+844.803554859" Jan 26 10:57:09 crc kubenswrapper[5003]: I0126 10:57:09.280817 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-w2mgm"] Jan 26 10:57:09 crc kubenswrapper[5003]: I0126 10:57:09.283991 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/openstack-galera-0" podStartSLOduration=8.89154897 podStartE2EDuration="42.283975435s" podCreationTimestamp="2026-01-26 10:56:27 +0000 UTC" firstStartedPulling="2026-01-26 10:56:29.880612308 +0000 UTC m=+805.421837859" lastFinishedPulling="2026-01-26 10:57:03.273038763 +0000 UTC m=+838.814264324" observedRunningTime="2026-01-26 10:57:09.276249005 +0000 UTC m=+844.817474566" watchObservedRunningTime="2026-01-26 10:57:09.283975435 +0000 UTC m=+844.825200996" Jan 26 10:57:09 crc kubenswrapper[5003]: I0126 10:57:09.336537 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/openstack-galera-2" podStartSLOduration=8.721717478 podStartE2EDuration="42.336513901s" podCreationTimestamp="2026-01-26 10:56:27 +0000 UTC" firstStartedPulling="2026-01-26 10:56:29.657019135 +0000 UTC m=+805.198244696" lastFinishedPulling="2026-01-26 10:57:03.271815558 +0000 UTC m=+838.813041119" 
observedRunningTime="2026-01-26 10:57:09.333919307 +0000 UTC m=+844.875144878" watchObservedRunningTime="2026-01-26 10:57:09.336513901 +0000 UTC m=+844.877739462" Jan 26 10:57:09 crc kubenswrapper[5003]: I0126 10:57:09.338066 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-index-w2mgm" podStartSLOduration=2.448228928 podStartE2EDuration="5.338058825s" podCreationTimestamp="2026-01-26 10:57:04 +0000 UTC" firstStartedPulling="2026-01-26 10:57:06.155797482 +0000 UTC m=+841.697023043" lastFinishedPulling="2026-01-26 10:57:09.045627379 +0000 UTC m=+844.586852940" observedRunningTime="2026-01-26 10:57:09.299386613 +0000 UTC m=+844.840612174" watchObservedRunningTime="2026-01-26 10:57:09.338058825 +0000 UTC m=+844.879284386" Jan 26 10:57:09 crc kubenswrapper[5003]: I0126 10:57:09.444449 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/openstack-galera-0" Jan 26 10:57:09 crc kubenswrapper[5003]: I0126 10:57:09.444771 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="swift-kuttl-tests/openstack-galera-0" Jan 26 10:57:09 crc kubenswrapper[5003]: I0126 10:57:09.469682 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/openstack-galera-1" Jan 26 10:57:09 crc kubenswrapper[5003]: I0126 10:57:09.469730 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="swift-kuttl-tests/openstack-galera-1" Jan 26 10:57:09 crc kubenswrapper[5003]: I0126 10:57:09.906857 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-tqpvl"] Jan 26 10:57:09 crc kubenswrapper[5003]: I0126 10:57:09.907574 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-tqpvl" Jan 26 10:57:09 crc kubenswrapper[5003]: I0126 10:57:09.949777 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-tqpvl"] Jan 26 10:57:10 crc kubenswrapper[5003]: I0126 10:57:10.063880 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gzgp\" (UniqueName: \"kubernetes.io/projected/7e38fee1-5f72-4d2e-9db9-64bd94887318-kube-api-access-2gzgp\") pod \"rabbitmq-cluster-operator-index-tqpvl\" (UID: \"7e38fee1-5f72-4d2e-9db9-64bd94887318\") " pod="openstack-operators/rabbitmq-cluster-operator-index-tqpvl" Jan 26 10:57:10 crc kubenswrapper[5003]: I0126 10:57:10.165057 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gzgp\" (UniqueName: \"kubernetes.io/projected/7e38fee1-5f72-4d2e-9db9-64bd94887318-kube-api-access-2gzgp\") pod \"rabbitmq-cluster-operator-index-tqpvl\" (UID: \"7e38fee1-5f72-4d2e-9db9-64bd94887318\") " pod="openstack-operators/rabbitmq-cluster-operator-index-tqpvl" Jan 26 10:57:10 crc kubenswrapper[5003]: I0126 10:57:10.185526 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gzgp\" (UniqueName: \"kubernetes.io/projected/7e38fee1-5f72-4d2e-9db9-64bd94887318-kube-api-access-2gzgp\") pod \"rabbitmq-cluster-operator-index-tqpvl\" (UID: \"7e38fee1-5f72-4d2e-9db9-64bd94887318\") " pod="openstack-operators/rabbitmq-cluster-operator-index-tqpvl" Jan 26 10:57:10 crc kubenswrapper[5003]: I0126 10:57:10.228146 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-tqpvl" Jan 26 10:57:10 crc kubenswrapper[5003]: I0126 10:57:10.494639 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-tqpvl"] Jan 26 10:57:10 crc kubenswrapper[5003]: W0126 10:57:10.506961 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7e38fee1_5f72_4d2e_9db9_64bd94887318.slice/crio-a8cbcf74c56158a2a3bcab0cc4ab20e1abf0ce01d8abc64e6daf815f472693f8 WatchSource:0}: Error finding container a8cbcf74c56158a2a3bcab0cc4ab20e1abf0ce01d8abc64e6daf815f472693f8: Status 404 returned error can't find the container with id a8cbcf74c56158a2a3bcab0cc4ab20e1abf0ce01d8abc64e6daf815f472693f8 Jan 26 10:57:11 crc kubenswrapper[5003]: I0126 10:57:11.261249 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-tqpvl" event={"ID":"7e38fee1-5f72-4d2e-9db9-64bd94887318","Type":"ContainerStarted","Data":"43d856949d1978ceabac3af3e00f3761391235db9e6583d9774d2849d7e3c64f"} Jan 26 10:57:11 crc kubenswrapper[5003]: I0126 10:57:11.261735 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-tqpvl" event={"ID":"7e38fee1-5f72-4d2e-9db9-64bd94887318","Type":"ContainerStarted","Data":"a8cbcf74c56158a2a3bcab0cc4ab20e1abf0ce01d8abc64e6daf815f472693f8"} Jan 26 10:57:11 crc kubenswrapper[5003]: I0126 10:57:11.261353 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/rabbitmq-cluster-operator-index-w2mgm" podUID="6e1b026a-48c5-4258-af65-65b701f38e26" containerName="registry-server" containerID="cri-o://be62d9d0ccbc05949b24b7f729270adfc0164850bf36ebe258cd79e3d34cbf17" gracePeriod=2 Jan 26 10:57:11 crc kubenswrapper[5003]: I0126 10:57:11.277393 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-index-tqpvl" podStartSLOduration=1.874556291 podStartE2EDuration="2.277370819s" podCreationTimestamp="2026-01-26 10:57:09 +0000 UTC" firstStartedPulling="2026-01-26 10:57:10.513497461 +0000 UTC m=+846.054723022" lastFinishedPulling="2026-01-26 10:57:10.916311989 +0000 UTC m=+846.457537550" observedRunningTime="2026-01-26 10:57:11.275881216 +0000 UTC m=+846.817106777" watchObservedRunningTime="2026-01-26 10:57:11.277370819 +0000 UTC m=+846.818596380" Jan 26 10:57:11 crc kubenswrapper[5003]: I0126 10:57:11.856342 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-w2mgm" Jan 26 10:57:11 crc kubenswrapper[5003]: I0126 10:57:11.990553 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vxcfs\" (UniqueName: \"kubernetes.io/projected/6e1b026a-48c5-4258-af65-65b701f38e26-kube-api-access-vxcfs\") pod \"6e1b026a-48c5-4258-af65-65b701f38e26\" (UID: \"6e1b026a-48c5-4258-af65-65b701f38e26\") " Jan 26 10:57:11 crc kubenswrapper[5003]: I0126 10:57:11.996651 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e1b026a-48c5-4258-af65-65b701f38e26-kube-api-access-vxcfs" (OuterVolumeSpecName: "kube-api-access-vxcfs") pod "6e1b026a-48c5-4258-af65-65b701f38e26" (UID: "6e1b026a-48c5-4258-af65-65b701f38e26"). InnerVolumeSpecName "kube-api-access-vxcfs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:57:12 crc kubenswrapper[5003]: I0126 10:57:12.091862 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vxcfs\" (UniqueName: \"kubernetes.io/projected/6e1b026a-48c5-4258-af65-65b701f38e26-kube-api-access-vxcfs\") on node \"crc\" DevicePath \"\"" Jan 26 10:57:12 crc kubenswrapper[5003]: I0126 10:57:12.269337 5003 generic.go:334] "Generic (PLEG): container finished" podID="6e1b026a-48c5-4258-af65-65b701f38e26" containerID="be62d9d0ccbc05949b24b7f729270adfc0164850bf36ebe258cd79e3d34cbf17" exitCode=0 Jan 26 10:57:12 crc kubenswrapper[5003]: I0126 10:57:12.269395 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-w2mgm" event={"ID":"6e1b026a-48c5-4258-af65-65b701f38e26","Type":"ContainerDied","Data":"be62d9d0ccbc05949b24b7f729270adfc0164850bf36ebe258cd79e3d34cbf17"} Jan 26 10:57:12 crc kubenswrapper[5003]: I0126 10:57:12.269439 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-w2mgm" Jan 26 10:57:12 crc kubenswrapper[5003]: I0126 10:57:12.269483 5003 scope.go:117] "RemoveContainer" containerID="be62d9d0ccbc05949b24b7f729270adfc0164850bf36ebe258cd79e3d34cbf17" Jan 26 10:57:12 crc kubenswrapper[5003]: I0126 10:57:12.269465 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-w2mgm" event={"ID":"6e1b026a-48c5-4258-af65-65b701f38e26","Type":"ContainerDied","Data":"bc6245ddc0de40dc1e935cb4eed024d9c6776a2c097519af568b210020d8a7bb"} Jan 26 10:57:12 crc kubenswrapper[5003]: I0126 10:57:12.290596 5003 scope.go:117] "RemoveContainer" containerID="be62d9d0ccbc05949b24b7f729270adfc0164850bf36ebe258cd79e3d34cbf17" Jan 26 10:57:12 crc kubenswrapper[5003]: E0126 10:57:12.291088 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be62d9d0ccbc05949b24b7f729270adfc0164850bf36ebe258cd79e3d34cbf17\": container with ID starting with be62d9d0ccbc05949b24b7f729270adfc0164850bf36ebe258cd79e3d34cbf17 not found: ID does not exist" containerID="be62d9d0ccbc05949b24b7f729270adfc0164850bf36ebe258cd79e3d34cbf17" Jan 26 10:57:12 crc kubenswrapper[5003]: I0126 10:57:12.291138 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be62d9d0ccbc05949b24b7f729270adfc0164850bf36ebe258cd79e3d34cbf17"} err="failed to get container status \"be62d9d0ccbc05949b24b7f729270adfc0164850bf36ebe258cd79e3d34cbf17\": rpc error: code = NotFound desc = could not find container \"be62d9d0ccbc05949b24b7f729270adfc0164850bf36ebe258cd79e3d34cbf17\": container with ID starting with be62d9d0ccbc05949b24b7f729270adfc0164850bf36ebe258cd79e3d34cbf17 not found: ID does not exist" Jan 26 10:57:12 crc kubenswrapper[5003]: I0126 10:57:12.308801 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-w2mgm"] Jan 26 10:57:12 crc kubenswrapper[5003]: I0126 10:57:12.318786 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-w2mgm"] Jan 26 10:57:12 crc kubenswrapper[5003]: I0126 10:57:12.492450 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/memcached-0" Jan 26 10:57:13 crc kubenswrapper[5003]: I0126 10:57:13.009251 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="6e1b026a-48c5-4258-af65-65b701f38e26" path="/var/lib/kubelet/pods/6e1b026a-48c5-4258-af65-65b701f38e26/volumes" Jan 26 10:57:19 crc kubenswrapper[5003]: I0126 10:57:19.159936 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="swift-kuttl-tests/openstack-galera-2" Jan 26 10:57:19 crc kubenswrapper[5003]: I0126 10:57:19.160552 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/openstack-galera-2" Jan 26 10:57:19 crc kubenswrapper[5003]: I0126 10:57:19.230020 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="swift-kuttl-tests/openstack-galera-2" Jan 26 10:57:19 crc kubenswrapper[5003]: I0126 10:57:19.384340 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/openstack-galera-2" Jan 26 10:57:20 crc kubenswrapper[5003]: I0126 10:57:20.228933 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/rabbitmq-cluster-operator-index-tqpvl" Jan 26 10:57:20 crc kubenswrapper[5003]: I0126 10:57:20.229051 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/rabbitmq-cluster-operator-index-tqpvl" Jan 26 10:57:20 crc kubenswrapper[5003]: I0126 10:57:20.275371 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/rabbitmq-cluster-operator-index-tqpvl" Jan 26 10:57:20 crc kubenswrapper[5003]: I0126 10:57:20.350420 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/rabbitmq-cluster-operator-index-tqpvl" Jan 26 10:57:21 crc kubenswrapper[5003]: I0126 10:57:21.728746 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590qfdcp"] Jan 26 10:57:21 crc kubenswrapper[5003]: E0126 10:57:21.728980 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e1b026a-48c5-4258-af65-65b701f38e26" containerName="registry-server" Jan 26 10:57:21 crc kubenswrapper[5003]: I0126 10:57:21.728991 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e1b026a-48c5-4258-af65-65b701f38e26" containerName="registry-server" Jan 26 10:57:21 crc kubenswrapper[5003]: I0126 10:57:21.729093 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e1b026a-48c5-4258-af65-65b701f38e26" containerName="registry-server" Jan 26 10:57:21 crc kubenswrapper[5003]: I0126 10:57:21.729911 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590qfdcp" Jan 26 10:57:21 crc kubenswrapper[5003]: I0126 10:57:21.750904 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-x8f6q" Jan 26 10:57:21 crc kubenswrapper[5003]: I0126 10:57:21.758195 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590qfdcp"] Jan 26 10:57:21 crc kubenswrapper[5003]: I0126 10:57:21.827477 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9532cd56-bc22-4155-ab14-7fcdc05a4748-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590qfdcp\" (UID: \"9532cd56-bc22-4155-ab14-7fcdc05a4748\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590qfdcp" Jan 26 10:57:21 crc kubenswrapper[5003]: I0126 10:57:21.827555 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6br4\" (UniqueName: \"kubernetes.io/projected/9532cd56-bc22-4155-ab14-7fcdc05a4748-kube-api-access-p6br4\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590qfdcp\" (UID: \"9532cd56-bc22-4155-ab14-7fcdc05a4748\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590qfdcp" Jan 26 10:57:21 crc kubenswrapper[5003]: I0126 10:57:21.827730 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9532cd56-bc22-4155-ab14-7fcdc05a4748-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590qfdcp\" (UID: \"9532cd56-bc22-4155-ab14-7fcdc05a4748\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590qfdcp" Jan 26 10:57:21 crc kubenswrapper[5003]: I0126 10:57:21.928734 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9532cd56-bc22-4155-ab14-7fcdc05a4748-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590qfdcp\" (UID: \"9532cd56-bc22-4155-ab14-7fcdc05a4748\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590qfdcp" Jan 26 10:57:21 crc kubenswrapper[5003]: I0126 10:57:21.928838 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6br4\" (UniqueName: \"kubernetes.io/projected/9532cd56-bc22-4155-ab14-7fcdc05a4748-kube-api-access-p6br4\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590qfdcp\" (UID: \"9532cd56-bc22-4155-ab14-7fcdc05a4748\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590qfdcp" Jan 26 10:57:21 crc kubenswrapper[5003]: I0126 10:57:21.928894 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9532cd56-bc22-4155-ab14-7fcdc05a4748-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590qfdcp\" (UID: \"9532cd56-bc22-4155-ab14-7fcdc05a4748\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590qfdcp" Jan 26 10:57:21 crc kubenswrapper[5003]: I0126 10:57:21.929573 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/9532cd56-bc22-4155-ab14-7fcdc05a4748-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590qfdcp\" (UID: \"9532cd56-bc22-4155-ab14-7fcdc05a4748\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590qfdcp" Jan 26 10:57:21 crc kubenswrapper[5003]: I0126 10:57:21.929756 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9532cd56-bc22-4155-ab14-7fcdc05a4748-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590qfdcp\" (UID: \"9532cd56-bc22-4155-ab14-7fcdc05a4748\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590qfdcp" Jan 26 10:57:21 crc kubenswrapper[5003]: I0126 10:57:21.951670 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6br4\" (UniqueName: \"kubernetes.io/projected/9532cd56-bc22-4155-ab14-7fcdc05a4748-kube-api-access-p6br4\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590qfdcp\" (UID: \"9532cd56-bc22-4155-ab14-7fcdc05a4748\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590qfdcp" Jan 26 10:57:22 crc kubenswrapper[5003]: I0126 10:57:22.071749 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590qfdcp" Jan 26 10:57:22 crc kubenswrapper[5003]: I0126 10:57:22.498485 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590qfdcp"] Jan 26 10:57:22 crc kubenswrapper[5003]: W0126 10:57:22.503415 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9532cd56_bc22_4155_ab14_7fcdc05a4748.slice/crio-803d416266634b48152c9c69390acadc75fb0e4c83ed35347378b08011b72629 WatchSource:0}: Error finding container 803d416266634b48152c9c69390acadc75fb0e4c83ed35347378b08011b72629: Status 404 returned error can't find the container with id 803d416266634b48152c9c69390acadc75fb0e4c83ed35347378b08011b72629 Jan 26 10:57:23 crc kubenswrapper[5003]: I0126 10:57:23.339360 5003 generic.go:334] "Generic (PLEG): container finished" podID="9532cd56-bc22-4155-ab14-7fcdc05a4748" containerID="6746cd0d8a9a4059c2ab789eb59c874a5a5a1fee7fe895034c3fe78272b86bad" exitCode=0 Jan 26 10:57:23 crc kubenswrapper[5003]: I0126 10:57:23.339419 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590qfdcp" event={"ID":"9532cd56-bc22-4155-ab14-7fcdc05a4748","Type":"ContainerDied","Data":"6746cd0d8a9a4059c2ab789eb59c874a5a5a1fee7fe895034c3fe78272b86bad"} Jan 26 10:57:23 crc kubenswrapper[5003]: I0126 10:57:23.339459 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590qfdcp" event={"ID":"9532cd56-bc22-4155-ab14-7fcdc05a4748","Type":"ContainerStarted","Data":"803d416266634b48152c9c69390acadc75fb0e4c83ed35347378b08011b72629"} Jan 26 10:57:24 crc kubenswrapper[5003]: I0126 10:57:24.349874 5003 generic.go:334] "Generic (PLEG): container finished" podID="9532cd56-bc22-4155-ab14-7fcdc05a4748" containerID="210a9db31f9899abc57fa0dfa9e17b430cfed6645b9fdc9f74b9063389647aee" exitCode=0 Jan 26 10:57:24 crc kubenswrapper[5003]: I0126 10:57:24.349963 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590qfdcp" event={"ID":"9532cd56-bc22-4155-ab14-7fcdc05a4748","Type":"ContainerDied","Data":"210a9db31f9899abc57fa0dfa9e17b430cfed6645b9fdc9f74b9063389647aee"} Jan 26 10:57:25 crc kubenswrapper[5003]: I0126 10:57:25.370731 5003 generic.go:334] "Generic (PLEG): container finished" podID="9532cd56-bc22-4155-ab14-7fcdc05a4748" containerID="3728c52b53f4478f4e64571c3cd299ad0d2dbcdc1eed0997b218681fbd3511dd" exitCode=0 Jan 26 10:57:25 crc kubenswrapper[5003]: I0126 10:57:25.370835 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590qfdcp" event={"ID":"9532cd56-bc22-4155-ab14-7fcdc05a4748","Type":"ContainerDied","Data":"3728c52b53f4478f4e64571c3cd299ad0d2dbcdc1eed0997b218681fbd3511dd"} Jan 26 10:57:26 crc kubenswrapper[5003]: I0126 10:57:26.837136 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590qfdcp" Jan 26 10:57:26 crc kubenswrapper[5003]: I0126 10:57:26.998124 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p6br4\" (UniqueName: \"kubernetes.io/projected/9532cd56-bc22-4155-ab14-7fcdc05a4748-kube-api-access-p6br4\") pod \"9532cd56-bc22-4155-ab14-7fcdc05a4748\" (UID: \"9532cd56-bc22-4155-ab14-7fcdc05a4748\") " Jan 26 10:57:26 crc kubenswrapper[5003]: I0126 10:57:26.998569 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9532cd56-bc22-4155-ab14-7fcdc05a4748-util\") pod \"9532cd56-bc22-4155-ab14-7fcdc05a4748\" (UID: \"9532cd56-bc22-4155-ab14-7fcdc05a4748\") " Jan 26 10:57:26 crc kubenswrapper[5003]: I0126 10:57:26.998635 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9532cd56-bc22-4155-ab14-7fcdc05a4748-bundle\") pod \"9532cd56-bc22-4155-ab14-7fcdc05a4748\" (UID: \"9532cd56-bc22-4155-ab14-7fcdc05a4748\") " Jan 26 10:57:26 crc kubenswrapper[5003]: I0126 10:57:26.999192 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9532cd56-bc22-4155-ab14-7fcdc05a4748-bundle" (OuterVolumeSpecName: "bundle") pod "9532cd56-bc22-4155-ab14-7fcdc05a4748" (UID: "9532cd56-bc22-4155-ab14-7fcdc05a4748"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:57:26 crc kubenswrapper[5003]: I0126 10:57:26.999672 5003 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9532cd56-bc22-4155-ab14-7fcdc05a4748-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 10:57:27 crc kubenswrapper[5003]: I0126 10:57:27.005753 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9532cd56-bc22-4155-ab14-7fcdc05a4748-kube-api-access-p6br4" (OuterVolumeSpecName: "kube-api-access-p6br4") pod "9532cd56-bc22-4155-ab14-7fcdc05a4748" (UID: "9532cd56-bc22-4155-ab14-7fcdc05a4748"). InnerVolumeSpecName "kube-api-access-p6br4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:57:27 crc kubenswrapper[5003]: I0126 10:57:27.008302 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9532cd56-bc22-4155-ab14-7fcdc05a4748-util" (OuterVolumeSpecName: "util") pod "9532cd56-bc22-4155-ab14-7fcdc05a4748" (UID: "9532cd56-bc22-4155-ab14-7fcdc05a4748"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:57:27 crc kubenswrapper[5003]: I0126 10:57:27.100520 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p6br4\" (UniqueName: \"kubernetes.io/projected/9532cd56-bc22-4155-ab14-7fcdc05a4748-kube-api-access-p6br4\") on node \"crc\" DevicePath \"\"" Jan 26 10:57:27 crc kubenswrapper[5003]: I0126 10:57:27.101051 5003 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9532cd56-bc22-4155-ab14-7fcdc05a4748-util\") on node \"crc\" DevicePath \"\"" Jan 26 10:57:27 crc kubenswrapper[5003]: I0126 10:57:27.399473 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590qfdcp" event={"ID":"9532cd56-bc22-4155-ab14-7fcdc05a4748","Type":"ContainerDied","Data":"803d416266634b48152c9c69390acadc75fb0e4c83ed35347378b08011b72629"} Jan 26 10:57:27 crc kubenswrapper[5003]: I0126 10:57:27.399517 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="803d416266634b48152c9c69390acadc75fb0e4c83ed35347378b08011b72629" Jan 26 10:57:27 crc kubenswrapper[5003]: I0126 10:57:27.399600 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590qfdcp" Jan 26 10:57:27 crc kubenswrapper[5003]: I0126 10:57:27.894526 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/root-account-create-update-x8p2x"] Jan 26 10:57:27 crc kubenswrapper[5003]: E0126 10:57:27.894850 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9532cd56-bc22-4155-ab14-7fcdc05a4748" containerName="pull" Jan 26 10:57:27 crc kubenswrapper[5003]: I0126 10:57:27.894867 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="9532cd56-bc22-4155-ab14-7fcdc05a4748" containerName="pull" Jan 26 10:57:27 crc kubenswrapper[5003]: E0126 10:57:27.894900 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9532cd56-bc22-4155-ab14-7fcdc05a4748" containerName="util" Jan 26 10:57:27 crc kubenswrapper[5003]: I0126 10:57:27.894909 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="9532cd56-bc22-4155-ab14-7fcdc05a4748" containerName="util" Jan 26 10:57:27 crc kubenswrapper[5003]: E0126 10:57:27.894919 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9532cd56-bc22-4155-ab14-7fcdc05a4748" containerName="extract" Jan 26 10:57:27 crc kubenswrapper[5003]: I0126 10:57:27.894929 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="9532cd56-bc22-4155-ab14-7fcdc05a4748" containerName="extract" Jan 26 10:57:27 crc kubenswrapper[5003]: I0126 10:57:27.895073 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="9532cd56-bc22-4155-ab14-7fcdc05a4748" containerName="extract" Jan 26 10:57:27 crc kubenswrapper[5003]: I0126 10:57:27.895636 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/root-account-create-update-x8p2x" Jan 26 10:57:27 crc kubenswrapper[5003]: I0126 10:57:27.901567 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"openstack-mariadb-root-db-secret" Jan 26 10:57:27 crc kubenswrapper[5003]: I0126 10:57:27.920414 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/root-account-create-update-x8p2x"] Jan 26 10:57:28 crc kubenswrapper[5003]: I0126 10:57:28.015567 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d959552e-4345-4061-84f9-bbe50cca4b4d-operator-scripts\") pod \"root-account-create-update-x8p2x\" (UID: \"d959552e-4345-4061-84f9-bbe50cca4b4d\") " pod="swift-kuttl-tests/root-account-create-update-x8p2x" Jan 26 10:57:28 crc kubenswrapper[5003]: I0126 10:57:28.015797 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2kzhj\" (UniqueName: \"kubernetes.io/projected/d959552e-4345-4061-84f9-bbe50cca4b4d-kube-api-access-2kzhj\") pod \"root-account-create-update-x8p2x\" (UID: \"d959552e-4345-4061-84f9-bbe50cca4b4d\") " pod="swift-kuttl-tests/root-account-create-update-x8p2x" Jan 26 10:57:28 crc kubenswrapper[5003]: I0126 10:57:28.117373 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d959552e-4345-4061-84f9-bbe50cca4b4d-operator-scripts\") pod \"root-account-create-update-x8p2x\" (UID: \"d959552e-4345-4061-84f9-bbe50cca4b4d\") " pod="swift-kuttl-tests/root-account-create-update-x8p2x" Jan 26 10:57:28 crc kubenswrapper[5003]: I0126 10:57:28.117556 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2kzhj\" (UniqueName: \"kubernetes.io/projected/d959552e-4345-4061-84f9-bbe50cca4b4d-kube-api-access-2kzhj\") pod \"root-account-create-update-x8p2x\" (UID: \"d959552e-4345-4061-84f9-bbe50cca4b4d\") " pod="swift-kuttl-tests/root-account-create-update-x8p2x" Jan 26 10:57:28 crc kubenswrapper[5003]: I0126 10:57:28.118902 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d959552e-4345-4061-84f9-bbe50cca4b4d-operator-scripts\") pod \"root-account-create-update-x8p2x\" (UID: \"d959552e-4345-4061-84f9-bbe50cca4b4d\") " pod="swift-kuttl-tests/root-account-create-update-x8p2x" Jan 26 10:57:28 crc kubenswrapper[5003]: I0126 10:57:28.144009 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2kzhj\" (UniqueName: \"kubernetes.io/projected/d959552e-4345-4061-84f9-bbe50cca4b4d-kube-api-access-2kzhj\") pod \"root-account-create-update-x8p2x\" (UID: \"d959552e-4345-4061-84f9-bbe50cca4b4d\") " pod="swift-kuttl-tests/root-account-create-update-x8p2x" Jan 26 10:57:28 crc kubenswrapper[5003]: I0126 10:57:28.222578 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/root-account-create-update-x8p2x" Jan 26 10:57:28 crc kubenswrapper[5003]: I0126 10:57:28.635913 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/root-account-create-update-x8p2x"] Jan 26 10:57:29 crc kubenswrapper[5003]: I0126 10:57:29.219692 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="swift-kuttl-tests/openstack-galera-2" podUID="bd511f4f-c18a-4f7c-8fb9-1d760a3039ac" containerName="galera" probeResult="failure" output=< Jan 26 10:57:29 crc kubenswrapper[5003]: wsrep_local_state_comment (Donor/Desynced) differs from Synced Jan 26 10:57:29 crc kubenswrapper[5003]: > Jan 26 10:57:29 crc kubenswrapper[5003]: I0126 10:57:29.418412 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/root-account-create-update-x8p2x" event={"ID":"d959552e-4345-4061-84f9-bbe50cca4b4d","Type":"ContainerStarted","Data":"b96573f3c47c517d1cfd1917cbda877c77d172f10174f8f67d024f81a4025c19"} Jan 26 10:57:29 crc kubenswrapper[5003]: I0126 10:57:29.418463 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/root-account-create-update-x8p2x" event={"ID":"d959552e-4345-4061-84f9-bbe50cca4b4d","Type":"ContainerStarted","Data":"c43ed8553837abb36fce255388a8f73e207fb9a5d506622fc525f93013fee481"} Jan 26 10:57:29 crc kubenswrapper[5003]: I0126 10:57:29.435613 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/root-account-create-update-x8p2x" podStartSLOduration=2.435588218 podStartE2EDuration="2.435588218s" podCreationTimestamp="2026-01-26 10:57:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:57:29.434371963 +0000 UTC m=+864.975597534" watchObservedRunningTime="2026-01-26 10:57:29.435588218 +0000 UTC m=+864.976813779" Jan 26 10:57:30 crc kubenswrapper[5003]: I0126 10:57:30.430270 5003 generic.go:334] "Generic (PLEG): container finished" podID="d959552e-4345-4061-84f9-bbe50cca4b4d" containerID="b96573f3c47c517d1cfd1917cbda877c77d172f10174f8f67d024f81a4025c19" exitCode=0 Jan 26 10:57:30 crc kubenswrapper[5003]: I0126 10:57:30.430380 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/root-account-create-update-x8p2x" event={"ID":"d959552e-4345-4061-84f9-bbe50cca4b4d","Type":"ContainerDied","Data":"b96573f3c47c517d1cfd1917cbda877c77d172f10174f8f67d024f81a4025c19"} Jan 26 10:57:31 crc kubenswrapper[5003]: I0126 10:57:31.768487 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/root-account-create-update-x8p2x" Jan 26 10:57:31 crc kubenswrapper[5003]: I0126 10:57:31.874323 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d959552e-4345-4061-84f9-bbe50cca4b4d-operator-scripts\") pod \"d959552e-4345-4061-84f9-bbe50cca4b4d\" (UID: \"d959552e-4345-4061-84f9-bbe50cca4b4d\") " Jan 26 10:57:31 crc kubenswrapper[5003]: I0126 10:57:31.875033 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d959552e-4345-4061-84f9-bbe50cca4b4d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d959552e-4345-4061-84f9-bbe50cca4b4d" (UID: "d959552e-4345-4061-84f9-bbe50cca4b4d"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:57:31 crc kubenswrapper[5003]: I0126 10:57:31.875153 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2kzhj\" (UniqueName: \"kubernetes.io/projected/d959552e-4345-4061-84f9-bbe50cca4b4d-kube-api-access-2kzhj\") pod \"d959552e-4345-4061-84f9-bbe50cca4b4d\" (UID: \"d959552e-4345-4061-84f9-bbe50cca4b4d\") " Jan 26 10:57:31 crc kubenswrapper[5003]: I0126 10:57:31.875538 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d959552e-4345-4061-84f9-bbe50cca4b4d-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 26 10:57:31 crc kubenswrapper[5003]: I0126 10:57:31.882100 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d959552e-4345-4061-84f9-bbe50cca4b4d-kube-api-access-2kzhj" (OuterVolumeSpecName: "kube-api-access-2kzhj") pod "d959552e-4345-4061-84f9-bbe50cca4b4d" (UID: "d959552e-4345-4061-84f9-bbe50cca4b4d"). InnerVolumeSpecName "kube-api-access-2kzhj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:57:31 crc kubenswrapper[5003]: I0126 10:57:31.977108 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2kzhj\" (UniqueName: \"kubernetes.io/projected/d959552e-4345-4061-84f9-bbe50cca4b4d-kube-api-access-2kzhj\") on node \"crc\" DevicePath \"\"" Jan 26 10:57:32 crc kubenswrapper[5003]: I0126 10:57:32.445172 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/root-account-create-update-x8p2x" event={"ID":"d959552e-4345-4061-84f9-bbe50cca4b4d","Type":"ContainerDied","Data":"c43ed8553837abb36fce255388a8f73e207fb9a5d506622fc525f93013fee481"} Jan 26 10:57:32 crc kubenswrapper[5003]: I0126 10:57:32.445488 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c43ed8553837abb36fce255388a8f73e207fb9a5d506622fc525f93013fee481" Jan 26 10:57:32 crc kubenswrapper[5003]: I0126 10:57:32.445244 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/root-account-create-update-x8p2x" Jan 26 10:57:33 crc kubenswrapper[5003]: I0126 10:57:33.974450 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="swift-kuttl-tests/openstack-galera-0" Jan 26 10:57:34 crc kubenswrapper[5003]: I0126 10:57:34.052840 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/openstack-galera-0" Jan 26 10:57:35 crc kubenswrapper[5003]: I0126 10:57:35.517186 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="swift-kuttl-tests/openstack-galera-1" Jan 26 10:57:35 crc kubenswrapper[5003]: I0126 10:57:35.579620 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/openstack-galera-1" Jan 26 10:57:40 crc kubenswrapper[5003]: I0126 10:57:40.301219 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-8r4s4"] Jan 26 10:57:40 crc kubenswrapper[5003]: E0126 10:57:40.302525 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d959552e-4345-4061-84f9-bbe50cca4b4d" containerName="mariadb-account-create-update" Jan 26 10:57:40 crc kubenswrapper[5003]: I0126 10:57:40.302559 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d959552e-4345-4061-84f9-bbe50cca4b4d" containerName="mariadb-account-create-update" Jan 26 10:57:40 crc kubenswrapper[5003]: I0126 10:57:40.302819 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d959552e-4345-4061-84f9-bbe50cca4b4d" containerName="mariadb-account-create-update" Jan 26 10:57:40 crc kubenswrapper[5003]: I0126 10:57:40.303611 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-8r4s4" Jan 26 10:57:40 crc kubenswrapper[5003]: I0126 10:57:40.306064 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-dockercfg-xwp2s" Jan 26 10:57:40 crc kubenswrapper[5003]: I0126 10:57:40.307169 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-8r4s4"] Jan 26 10:57:40 crc kubenswrapper[5003]: I0126 10:57:40.397803 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dzqx\" (UniqueName: \"kubernetes.io/projected/5479a1b5-f4d1-4ff7-a602-c1cd068f3ee7-kube-api-access-7dzqx\") pod \"rabbitmq-cluster-operator-779fc9694b-8r4s4\" (UID: \"5479a1b5-f4d1-4ff7-a602-c1cd068f3ee7\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-8r4s4" Jan 26 10:57:40 crc kubenswrapper[5003]: I0126 10:57:40.499372 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dzqx\" (UniqueName: \"kubernetes.io/projected/5479a1b5-f4d1-4ff7-a602-c1cd068f3ee7-kube-api-access-7dzqx\") pod \"rabbitmq-cluster-operator-779fc9694b-8r4s4\" (UID: \"5479a1b5-f4d1-4ff7-a602-c1cd068f3ee7\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-8r4s4" Jan 26 10:57:40 crc kubenswrapper[5003]: I0126 10:57:40.520511 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dzqx\" (UniqueName: \"kubernetes.io/projected/5479a1b5-f4d1-4ff7-a602-c1cd068f3ee7-kube-api-access-7dzqx\") pod \"rabbitmq-cluster-operator-779fc9694b-8r4s4\" (UID: \"5479a1b5-f4d1-4ff7-a602-c1cd068f3ee7\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-8r4s4" 
Jan 26 10:57:40 crc kubenswrapper[5003]: I0126 10:57:40.636368 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-8r4s4" Jan 26 10:57:41 crc kubenswrapper[5003]: I0126 10:57:41.036853 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-8r4s4"] Jan 26 10:57:41 crc kubenswrapper[5003]: I0126 10:57:41.504680 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-8r4s4" event={"ID":"5479a1b5-f4d1-4ff7-a602-c1cd068f3ee7","Type":"ContainerStarted","Data":"3f29437fde206ef86a5a8edc28a6b35cf22b6f0c4291c78054bad9772d6e9eac"} Jan 26 10:57:45 crc kubenswrapper[5003]: I0126 10:57:45.532068 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-8r4s4" event={"ID":"5479a1b5-f4d1-4ff7-a602-c1cd068f3ee7","Type":"ContainerStarted","Data":"a9685b78c4e88b471c0aa7ce7b959d10b235efab5ab2506ee634a820e1f0d5d5"} Jan 26 10:57:45 crc kubenswrapper[5003]: I0126 10:57:45.549424 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-8r4s4" podStartSLOduration=2.169227662 podStartE2EDuration="5.54940611s" podCreationTimestamp="2026-01-26 10:57:40 +0000 UTC" firstStartedPulling="2026-01-26 10:57:41.046943329 +0000 UTC m=+876.588168890" lastFinishedPulling="2026-01-26 10:57:44.427121777 +0000 UTC m=+879.968347338" observedRunningTime="2026-01-26 10:57:45.544185401 +0000 UTC m=+881.085410962" watchObservedRunningTime="2026-01-26 10:57:45.54940611 +0000 UTC m=+881.090631671" Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.373582 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/rabbitmq-server-0"] Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.376237 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/rabbitmq-server-0" Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.378575 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"rabbitmq-erlang-cookie" Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.378972 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"rabbitmq-server-dockercfg-xgwd9" Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.378992 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"rabbitmq-server-conf" Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.379028 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"rabbitmq-default-user" Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.391074 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"rabbitmq-plugins-conf" Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.397474 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/rabbitmq-server-0"] Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.438036 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") " pod="swift-kuttl-tests/rabbitmq-server-0" Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.438114 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") " pod="swift-kuttl-tests/rabbitmq-server-0" Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.438153 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-e5cf6171-4855-4722-b78a-d87822d3d337\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e5cf6171-4855-4722-b78a-d87822d3d337\") pod \"rabbitmq-server-0\" (UID: \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") " pod="swift-kuttl-tests/rabbitmq-server-0" Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.438201 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2x6d\" (UniqueName: \"kubernetes.io/projected/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-kube-api-access-w2x6d\") pod \"rabbitmq-server-0\" (UID: \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") " pod="swift-kuttl-tests/rabbitmq-server-0" Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.438231 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") " pod="swift-kuttl-tests/rabbitmq-server-0" Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.438252 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") " pod="swift-kuttl-tests/rabbitmq-server-0" Jan 26 10:57:49 crc 
kubenswrapper[5003]: I0126 10:57:49.438307 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") " pod="swift-kuttl-tests/rabbitmq-server-0" Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.438329 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") " pod="swift-kuttl-tests/rabbitmq-server-0" Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.540190 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") " pod="swift-kuttl-tests/rabbitmq-server-0" Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.540585 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") " pod="swift-kuttl-tests/rabbitmq-server-0" Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.540616 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-e5cf6171-4855-4722-b78a-d87822d3d337\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e5cf6171-4855-4722-b78a-d87822d3d337\") pod \"rabbitmq-server-0\" (UID: \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") " pod="swift-kuttl-tests/rabbitmq-server-0" Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.540657 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2x6d\" (UniqueName: \"kubernetes.io/projected/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-kube-api-access-w2x6d\") pod \"rabbitmq-server-0\" (UID: \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") " pod="swift-kuttl-tests/rabbitmq-server-0" Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.540680 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") " pod="swift-kuttl-tests/rabbitmq-server-0" Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.540697 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") " pod="swift-kuttl-tests/rabbitmq-server-0" Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.540719 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") " pod="swift-kuttl-tests/rabbitmq-server-0" Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.540737 5003 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") " pod="swift-kuttl-tests/rabbitmq-server-0" Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.540737 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") " pod="swift-kuttl-tests/rabbitmq-server-0" Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.541865 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") " pod="swift-kuttl-tests/rabbitmq-server-0" Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.542058 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") " pod="swift-kuttl-tests/rabbitmq-server-0" Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.546825 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") " pod="swift-kuttl-tests/rabbitmq-server-0" Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.554162 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") " pod="swift-kuttl-tests/rabbitmq-server-0" Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.554189 5003 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.554222 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-e5cf6171-4855-4722-b78a-d87822d3d337\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e5cf6171-4855-4722-b78a-d87822d3d337\") pod \"rabbitmq-server-0\" (UID: \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/e6b32a280ba1a346c51fe10c5cea42e91be226f8702fa12d6571b52e3604d7c6/globalmount\"" pod="swift-kuttl-tests/rabbitmq-server-0" Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.554992 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") " pod="swift-kuttl-tests/rabbitmq-server-0" Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.559179 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2x6d\" (UniqueName: \"kubernetes.io/projected/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-kube-api-access-w2x6d\") pod \"rabbitmq-server-0\" (UID: \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") " pod="swift-kuttl-tests/rabbitmq-server-0" Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.587098 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-e5cf6171-4855-4722-b78a-d87822d3d337\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e5cf6171-4855-4722-b78a-d87822d3d337\") pod \"rabbitmq-server-0\" (UID: \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") " pod="swift-kuttl-tests/rabbitmq-server-0" Jan 26 10:57:49 crc kubenswrapper[5003]: I0126 10:57:49.703618 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/rabbitmq-server-0" Jan 26 10:57:50 crc kubenswrapper[5003]: I0126 10:57:50.142943 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/rabbitmq-server-0"] Jan 26 10:57:50 crc kubenswrapper[5003]: I0126 10:57:50.572499 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/rabbitmq-server-0" event={"ID":"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2","Type":"ContainerStarted","Data":"a99edba5fe7bf60f154749b714e4b8f8a52e8cefefdcabd2867fe01b0137ae80"} Jan 26 10:57:51 crc kubenswrapper[5003]: I0126 10:57:51.093082 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-index-ltmz2"] Jan 26 10:57:51 crc kubenswrapper[5003]: I0126 10:57:51.094777 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-ltmz2" Jan 26 10:57:51 crc kubenswrapper[5003]: I0126 10:57:51.098553 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-index-dockercfg-hscxv" Jan 26 10:57:51 crc kubenswrapper[5003]: I0126 10:57:51.100679 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-ltmz2"] Jan 26 10:57:51 crc kubenswrapper[5003]: I0126 10:57:51.162577 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmxpf\" (UniqueName: \"kubernetes.io/projected/7db579cb-8411-4735-97b3-5e8e52ef06c5-kube-api-access-pmxpf\") pod \"keystone-operator-index-ltmz2\" (UID: \"7db579cb-8411-4735-97b3-5e8e52ef06c5\") " pod="openstack-operators/keystone-operator-index-ltmz2" Jan 26 10:57:51 crc kubenswrapper[5003]: I0126 10:57:51.263729 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmxpf\" (UniqueName: \"kubernetes.io/projected/7db579cb-8411-4735-97b3-5e8e52ef06c5-kube-api-access-pmxpf\") pod \"keystone-operator-index-ltmz2\" (UID: \"7db579cb-8411-4735-97b3-5e8e52ef06c5\") " pod="openstack-operators/keystone-operator-index-ltmz2" Jan 26 10:57:51 crc kubenswrapper[5003]: I0126 10:57:51.285940 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmxpf\" (UniqueName: \"kubernetes.io/projected/7db579cb-8411-4735-97b3-5e8e52ef06c5-kube-api-access-pmxpf\") pod \"keystone-operator-index-ltmz2\" (UID: \"7db579cb-8411-4735-97b3-5e8e52ef06c5\") " pod="openstack-operators/keystone-operator-index-ltmz2" Jan 26 10:57:51 crc kubenswrapper[5003]: I0126 10:57:51.423204 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-ltmz2" Jan 26 10:57:51 crc kubenswrapper[5003]: I0126 10:57:51.847989 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-ltmz2"] Jan 26 10:57:52 crc kubenswrapper[5003]: I0126 10:57:52.596125 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-ltmz2" event={"ID":"7db579cb-8411-4735-97b3-5e8e52ef06c5","Type":"ContainerStarted","Data":"7b0dd406b9221fce5bce587638fc40589a78416b645385f6dad9ab6f833c9631"} Jan 26 10:57:56 crc kubenswrapper[5003]: I0126 10:57:56.082084 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-index-ltmz2"] Jan 26 10:57:56 crc kubenswrapper[5003]: I0126 10:57:56.619313 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-ltmz2" event={"ID":"7db579cb-8411-4735-97b3-5e8e52ef06c5","Type":"ContainerStarted","Data":"6623413859b35369d47f2f15e75b2a99a6a56950ab2764fa511667e9ec1a0708"} Jan 26 10:57:56 crc kubenswrapper[5003]: I0126 10:57:56.619436 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/keystone-operator-index-ltmz2" podUID="7db579cb-8411-4735-97b3-5e8e52ef06c5" containerName="registry-server" containerID="cri-o://6623413859b35369d47f2f15e75b2a99a6a56950ab2764fa511667e9ec1a0708" gracePeriod=2 Jan 26 10:57:56 crc kubenswrapper[5003]: I0126 10:57:56.639238 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-index-ltmz2" podStartSLOduration=1.51116696 podStartE2EDuration="5.639220262s" podCreationTimestamp="2026-01-26 10:57:51 +0000 UTC" firstStartedPulling="2026-01-26 10:57:51.877866159 +0000 UTC m=+887.419091720" lastFinishedPulling="2026-01-26 10:57:56.005919421 +0000 UTC m=+891.547145022" observedRunningTime="2026-01-26 10:57:56.634666162 +0000 UTC m=+892.175891723" watchObservedRunningTime="2026-01-26 10:57:56.639220262 +0000 UTC m=+892.180445813" Jan 26 10:57:56 crc kubenswrapper[5003]: I0126 10:57:56.887006 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-index-sfj7j"] Jan 26 10:57:56 crc kubenswrapper[5003]: I0126 10:57:56.887730 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-sfj7j" Jan 26 10:57:56 crc kubenswrapper[5003]: I0126 10:57:56.894636 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-sfj7j"] Jan 26 10:57:56 crc kubenswrapper[5003]: I0126 10:57:56.945827 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbv6m\" (UniqueName: \"kubernetes.io/projected/a8d40265-176c-4b0c-add2-7d7ec6c76f50-kube-api-access-fbv6m\") pod \"keystone-operator-index-sfj7j\" (UID: \"a8d40265-176c-4b0c-add2-7d7ec6c76f50\") " pod="openstack-operators/keystone-operator-index-sfj7j" Jan 26 10:57:57 crc kubenswrapper[5003]: I0126 10:57:57.046757 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbv6m\" (UniqueName: \"kubernetes.io/projected/a8d40265-176c-4b0c-add2-7d7ec6c76f50-kube-api-access-fbv6m\") pod \"keystone-operator-index-sfj7j\" (UID: \"a8d40265-176c-4b0c-add2-7d7ec6c76f50\") " pod="openstack-operators/keystone-operator-index-sfj7j" Jan 26 10:57:57 crc kubenswrapper[5003]: I0126 10:57:57.071345 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbv6m\" (UniqueName: \"kubernetes.io/projected/a8d40265-176c-4b0c-add2-7d7ec6c76f50-kube-api-access-fbv6m\") pod \"keystone-operator-index-sfj7j\" (UID: \"a8d40265-176c-4b0c-add2-7d7ec6c76f50\") " pod="openstack-operators/keystone-operator-index-sfj7j" Jan 26 10:57:57 crc kubenswrapper[5003]: I0126 10:57:57.214908 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-index-sfj7j" Jan 26 10:57:57 crc kubenswrapper[5003]: I0126 10:57:57.632701 5003 generic.go:334] "Generic (PLEG): container finished" podID="7db579cb-8411-4735-97b3-5e8e52ef06c5" containerID="6623413859b35369d47f2f15e75b2a99a6a56950ab2764fa511667e9ec1a0708" exitCode=0 Jan 26 10:57:57 crc kubenswrapper[5003]: I0126 10:57:57.632793 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-ltmz2" event={"ID":"7db579cb-8411-4735-97b3-5e8e52ef06c5","Type":"ContainerDied","Data":"6623413859b35369d47f2f15e75b2a99a6a56950ab2764fa511667e9ec1a0708"} Jan 26 10:57:57 crc kubenswrapper[5003]: I0126 10:57:57.636063 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/rabbitmq-server-0" event={"ID":"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2","Type":"ContainerStarted","Data":"aea8ef2f11deb741e9e98d6b61f2796f1a8a03a5993b75f8332894748caf5489"} Jan 26 10:57:57 crc kubenswrapper[5003]: I0126 10:57:57.754987 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-sfj7j"] Jan 26 10:57:57 crc kubenswrapper[5003]: W0126 10:57:57.761017 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda8d40265_176c_4b0c_add2_7d7ec6c76f50.slice/crio-4af33e1aece999f669fd0d0ea76b32dd4c0c2644e04cd449376596b997022b27 WatchSource:0}: Error finding container 4af33e1aece999f669fd0d0ea76b32dd4c0c2644e04cd449376596b997022b27: Status 404 returned error can't find the container with id 4af33e1aece999f669fd0d0ea76b32dd4c0c2644e04cd449376596b997022b27 Jan 26 10:57:57 crc kubenswrapper[5003]: I0126 10:57:57.775472 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-ltmz2" Jan 26 10:57:57 crc kubenswrapper[5003]: I0126 10:57:57.967423 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pmxpf\" (UniqueName: \"kubernetes.io/projected/7db579cb-8411-4735-97b3-5e8e52ef06c5-kube-api-access-pmxpf\") pod \"7db579cb-8411-4735-97b3-5e8e52ef06c5\" (UID: \"7db579cb-8411-4735-97b3-5e8e52ef06c5\") " Jan 26 10:57:57 crc kubenswrapper[5003]: I0126 10:57:57.974998 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7db579cb-8411-4735-97b3-5e8e52ef06c5-kube-api-access-pmxpf" (OuterVolumeSpecName: "kube-api-access-pmxpf") pod "7db579cb-8411-4735-97b3-5e8e52ef06c5" (UID: "7db579cb-8411-4735-97b3-5e8e52ef06c5"). InnerVolumeSpecName "kube-api-access-pmxpf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:57:58 crc kubenswrapper[5003]: I0126 10:57:58.069963 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pmxpf\" (UniqueName: \"kubernetes.io/projected/7db579cb-8411-4735-97b3-5e8e52ef06c5-kube-api-access-pmxpf\") on node \"crc\" DevicePath \"\"" Jan 26 10:57:58 crc kubenswrapper[5003]: I0126 10:57:58.098185 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6lx88"] Jan 26 10:57:58 crc kubenswrapper[5003]: E0126 10:57:58.098425 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7db579cb-8411-4735-97b3-5e8e52ef06c5" containerName="registry-server" Jan 26 10:57:58 crc kubenswrapper[5003]: I0126 10:57:58.098442 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="7db579cb-8411-4735-97b3-5e8e52ef06c5" containerName="registry-server" Jan 26 10:57:58 crc kubenswrapper[5003]: I0126 10:57:58.098563 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="7db579cb-8411-4735-97b3-5e8e52ef06c5" containerName="registry-server" Jan 26 10:57:58 crc kubenswrapper[5003]: I0126 10:57:58.099528 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6lx88" Jan 26 10:57:58 crc kubenswrapper[5003]: I0126 10:57:58.114650 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6lx88"] Jan 26 10:57:58 crc kubenswrapper[5003]: I0126 10:57:58.272887 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2252190e-b3ad-44ff-9973-881f9e111836-utilities\") pod \"certified-operators-6lx88\" (UID: \"2252190e-b3ad-44ff-9973-881f9e111836\") " pod="openshift-marketplace/certified-operators-6lx88" Jan 26 10:57:58 crc kubenswrapper[5003]: I0126 10:57:58.272927 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2252190e-b3ad-44ff-9973-881f9e111836-catalog-content\") pod \"certified-operators-6lx88\" (UID: \"2252190e-b3ad-44ff-9973-881f9e111836\") " pod="openshift-marketplace/certified-operators-6lx88" Jan 26 10:57:58 crc kubenswrapper[5003]: I0126 10:57:58.273024 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcf6f\" (UniqueName: \"kubernetes.io/projected/2252190e-b3ad-44ff-9973-881f9e111836-kube-api-access-fcf6f\") pod \"certified-operators-6lx88\" (UID: \"2252190e-b3ad-44ff-9973-881f9e111836\") " pod="openshift-marketplace/certified-operators-6lx88" Jan 26 10:57:58 crc kubenswrapper[5003]: I0126 10:57:58.374258 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2252190e-b3ad-44ff-9973-881f9e111836-utilities\") pod \"certified-operators-6lx88\" (UID: \"2252190e-b3ad-44ff-9973-881f9e111836\") " pod="openshift-marketplace/certified-operators-6lx88" Jan 26 10:57:58 crc kubenswrapper[5003]: I0126 10:57:58.374357 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2252190e-b3ad-44ff-9973-881f9e111836-catalog-content\") pod \"certified-operators-6lx88\" (UID: \"2252190e-b3ad-44ff-9973-881f9e111836\") " pod="openshift-marketplace/certified-operators-6lx88" Jan 26 10:57:58 crc kubenswrapper[5003]: I0126 10:57:58.374465 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcf6f\" (UniqueName: \"kubernetes.io/projected/2252190e-b3ad-44ff-9973-881f9e111836-kube-api-access-fcf6f\") pod \"certified-operators-6lx88\" (UID: \"2252190e-b3ad-44ff-9973-881f9e111836\") " pod="openshift-marketplace/certified-operators-6lx88" Jan 26 10:57:58 crc kubenswrapper[5003]: I0126 10:57:58.374827 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2252190e-b3ad-44ff-9973-881f9e111836-utilities\") pod \"certified-operators-6lx88\" (UID: \"2252190e-b3ad-44ff-9973-881f9e111836\") " pod="openshift-marketplace/certified-operators-6lx88" Jan 26 10:57:58 crc kubenswrapper[5003]: I0126 10:57:58.374856 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2252190e-b3ad-44ff-9973-881f9e111836-catalog-content\") pod \"certified-operators-6lx88\" (UID: \"2252190e-b3ad-44ff-9973-881f9e111836\") " pod="openshift-marketplace/certified-operators-6lx88" Jan 26 10:57:58 crc kubenswrapper[5003]: I0126 10:57:58.392265 5003 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-fcf6f\" (UniqueName: \"kubernetes.io/projected/2252190e-b3ad-44ff-9973-881f9e111836-kube-api-access-fcf6f\") pod \"certified-operators-6lx88\" (UID: \"2252190e-b3ad-44ff-9973-881f9e111836\") " pod="openshift-marketplace/certified-operators-6lx88" Jan 26 10:57:58 crc kubenswrapper[5003]: I0126 10:57:58.416982 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6lx88" Jan 26 10:57:58 crc kubenswrapper[5003]: I0126 10:57:58.644230 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-sfj7j" event={"ID":"a8d40265-176c-4b0c-add2-7d7ec6c76f50","Type":"ContainerStarted","Data":"0327a4ed19740d5b06e261c488e7d365c8f51c877e32881d680e5f639b981702"} Jan 26 10:57:58 crc kubenswrapper[5003]: I0126 10:57:58.644281 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-sfj7j" event={"ID":"a8d40265-176c-4b0c-add2-7d7ec6c76f50","Type":"ContainerStarted","Data":"4af33e1aece999f669fd0d0ea76b32dd4c0c2644e04cd449376596b997022b27"} Jan 26 10:57:58 crc kubenswrapper[5003]: I0126 10:57:58.647988 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-index-ltmz2" Jan 26 10:57:58 crc kubenswrapper[5003]: I0126 10:57:58.648210 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-ltmz2" event={"ID":"7db579cb-8411-4735-97b3-5e8e52ef06c5","Type":"ContainerDied","Data":"7b0dd406b9221fce5bce587638fc40589a78416b645385f6dad9ab6f833c9631"} Jan 26 10:57:58 crc kubenswrapper[5003]: I0126 10:57:58.648252 5003 scope.go:117] "RemoveContainer" containerID="6623413859b35369d47f2f15e75b2a99a6a56950ab2764fa511667e9ec1a0708" Jan 26 10:57:58 crc kubenswrapper[5003]: I0126 10:57:58.667163 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6lx88"] Jan 26 10:57:58 crc kubenswrapper[5003]: I0126 10:57:58.669083 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-index-sfj7j" podStartSLOduration=2.26260205 podStartE2EDuration="2.669057893s" podCreationTimestamp="2026-01-26 10:57:56 +0000 UTC" firstStartedPulling="2026-01-26 10:57:57.7648963 +0000 UTC m=+893.306121861" lastFinishedPulling="2026-01-26 10:57:58.171352143 +0000 UTC m=+893.712577704" observedRunningTime="2026-01-26 10:57:58.668050065 +0000 UTC m=+894.209275626" watchObservedRunningTime="2026-01-26 10:57:58.669057893 +0000 UTC m=+894.210283454" Jan 26 10:57:58 crc kubenswrapper[5003]: W0126 10:57:58.684492 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2252190e_b3ad_44ff_9973_881f9e111836.slice/crio-ec3d9d110027eb985fe538fa97f1e7c1a98d50e6207841309ae60d73113e2d62 WatchSource:0}: Error finding container ec3d9d110027eb985fe538fa97f1e7c1a98d50e6207841309ae60d73113e2d62: Status 404 returned error can't find the container with id ec3d9d110027eb985fe538fa97f1e7c1a98d50e6207841309ae60d73113e2d62 Jan 26 10:57:58 crc kubenswrapper[5003]: I0126 10:57:58.701441 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-index-ltmz2"] Jan 26 10:57:58 crc kubenswrapper[5003]: I0126 10:57:58.707121 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/keystone-operator-index-ltmz2"] Jan 26 
10:57:59 crc kubenswrapper[5003]: I0126 10:57:59.010501 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7db579cb-8411-4735-97b3-5e8e52ef06c5" path="/var/lib/kubelet/pods/7db579cb-8411-4735-97b3-5e8e52ef06c5/volumes" Jan 26 10:57:59 crc kubenswrapper[5003]: I0126 10:57:59.654369 5003 generic.go:334] "Generic (PLEG): container finished" podID="2252190e-b3ad-44ff-9973-881f9e111836" containerID="239d0004851c16c2c4f4ba55a11166db03c909ad5186c00600073a1ed405a55d" exitCode=0 Jan 26 10:57:59 crc kubenswrapper[5003]: I0126 10:57:59.654467 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6lx88" event={"ID":"2252190e-b3ad-44ff-9973-881f9e111836","Type":"ContainerDied","Data":"239d0004851c16c2c4f4ba55a11166db03c909ad5186c00600073a1ed405a55d"} Jan 26 10:57:59 crc kubenswrapper[5003]: I0126 10:57:59.654501 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6lx88" event={"ID":"2252190e-b3ad-44ff-9973-881f9e111836","Type":"ContainerStarted","Data":"ec3d9d110027eb985fe538fa97f1e7c1a98d50e6207841309ae60d73113e2d62"} Jan 26 10:58:03 crc kubenswrapper[5003]: I0126 10:58:03.679688 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6lx88" event={"ID":"2252190e-b3ad-44ff-9973-881f9e111836","Type":"ContainerStarted","Data":"3e92098273fe3ba3459f2d5e3bdc34ee005f2854ee60120d6951cb3e6ab876c8"} Jan 26 10:58:04 crc kubenswrapper[5003]: I0126 10:58:04.688278 5003 generic.go:334] "Generic (PLEG): container finished" podID="2252190e-b3ad-44ff-9973-881f9e111836" containerID="3e92098273fe3ba3459f2d5e3bdc34ee005f2854ee60120d6951cb3e6ab876c8" exitCode=0 Jan 26 10:58:04 crc kubenswrapper[5003]: I0126 10:58:04.688431 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6lx88" event={"ID":"2252190e-b3ad-44ff-9973-881f9e111836","Type":"ContainerDied","Data":"3e92098273fe3ba3459f2d5e3bdc34ee005f2854ee60120d6951cb3e6ab876c8"} Jan 26 10:58:05 crc kubenswrapper[5003]: I0126 10:58:05.695892 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6lx88" event={"ID":"2252190e-b3ad-44ff-9973-881f9e111836","Type":"ContainerStarted","Data":"73f85b4e46965656b85a31e4ea547d3aebc12dc95de860a4b06ed52e68d1ddf5"} Jan 26 10:58:05 crc kubenswrapper[5003]: I0126 10:58:05.714901 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6lx88" podStartSLOduration=2.227133834 podStartE2EDuration="7.714885367s" podCreationTimestamp="2026-01-26 10:57:58 +0000 UTC" firstStartedPulling="2026-01-26 10:57:59.656147267 +0000 UTC m=+895.197372828" lastFinishedPulling="2026-01-26 10:58:05.1438988 +0000 UTC m=+900.685124361" observedRunningTime="2026-01-26 10:58:05.709603407 +0000 UTC m=+901.250828968" watchObservedRunningTime="2026-01-26 10:58:05.714885367 +0000 UTC m=+901.256110928" Jan 26 10:58:07 crc kubenswrapper[5003]: I0126 10:58:07.215739 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-index-sfj7j" Jan 26 10:58:07 crc kubenswrapper[5003]: I0126 10:58:07.215793 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/keystone-operator-index-sfj7j" Jan 26 10:58:07 crc kubenswrapper[5003]: I0126 10:58:07.252531 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack-operators/keystone-operator-index-sfj7j" Jan 26 10:58:07 crc kubenswrapper[5003]: I0126 10:58:07.754201 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-index-sfj7j" Jan 26 10:58:08 crc kubenswrapper[5003]: I0126 10:58:08.417428 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6lx88" Jan 26 10:58:08 crc kubenswrapper[5003]: I0126 10:58:08.417504 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6lx88" Jan 26 10:58:08 crc kubenswrapper[5003]: I0126 10:58:08.461606 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6lx88" Jan 26 10:58:09 crc kubenswrapper[5003]: I0126 10:58:09.040404 5003 patch_prober.go:28] interesting pod/machine-config-daemon-m84kp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 10:58:09 crc kubenswrapper[5003]: I0126 10:58:09.040466 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 10:58:11 crc kubenswrapper[5003]: I0126 10:58:11.533322 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/810ee47ade577678291ae176be4e20f603ed854e1e1c3c96e3f3e8130ehm899"] Jan 26 10:58:11 crc kubenswrapper[5003]: I0126 10:58:11.535791 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/810ee47ade577678291ae176be4e20f603ed854e1e1c3c96e3f3e8130ehm899" Jan 26 10:58:11 crc kubenswrapper[5003]: I0126 10:58:11.542922 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-x8f6q" Jan 26 10:58:11 crc kubenswrapper[5003]: I0126 10:58:11.546696 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/810ee47ade577678291ae176be4e20f603ed854e1e1c3c96e3f3e8130ehm899"] Jan 26 10:58:11 crc kubenswrapper[5003]: I0126 10:58:11.578609 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkhxd\" (UniqueName: \"kubernetes.io/projected/33f7fd58-e985-460d-a939-7b9bf745fdae-kube-api-access-nkhxd\") pod \"810ee47ade577678291ae176be4e20f603ed854e1e1c3c96e3f3e8130ehm899\" (UID: \"33f7fd58-e985-460d-a939-7b9bf745fdae\") " pod="openstack-operators/810ee47ade577678291ae176be4e20f603ed854e1e1c3c96e3f3e8130ehm899" Jan 26 10:58:11 crc kubenswrapper[5003]: I0126 10:58:11.578684 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/33f7fd58-e985-460d-a939-7b9bf745fdae-bundle\") pod \"810ee47ade577678291ae176be4e20f603ed854e1e1c3c96e3f3e8130ehm899\" (UID: \"33f7fd58-e985-460d-a939-7b9bf745fdae\") " pod="openstack-operators/810ee47ade577678291ae176be4e20f603ed854e1e1c3c96e3f3e8130ehm899" Jan 26 10:58:11 crc kubenswrapper[5003]: I0126 10:58:11.578773 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/33f7fd58-e985-460d-a939-7b9bf745fdae-util\") pod \"810ee47ade577678291ae176be4e20f603ed854e1e1c3c96e3f3e8130ehm899\" (UID: \"33f7fd58-e985-460d-a939-7b9bf745fdae\") " pod="openstack-operators/810ee47ade577678291ae176be4e20f603ed854e1e1c3c96e3f3e8130ehm899" Jan 26 10:58:11 crc kubenswrapper[5003]: I0126 10:58:11.679840 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/33f7fd58-e985-460d-a939-7b9bf745fdae-util\") pod \"810ee47ade577678291ae176be4e20f603ed854e1e1c3c96e3f3e8130ehm899\" (UID: \"33f7fd58-e985-460d-a939-7b9bf745fdae\") " pod="openstack-operators/810ee47ade577678291ae176be4e20f603ed854e1e1c3c96e3f3e8130ehm899" Jan 26 10:58:11 crc kubenswrapper[5003]: I0126 10:58:11.679964 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nkhxd\" (UniqueName: \"kubernetes.io/projected/33f7fd58-e985-460d-a939-7b9bf745fdae-kube-api-access-nkhxd\") pod \"810ee47ade577678291ae176be4e20f603ed854e1e1c3c96e3f3e8130ehm899\" (UID: \"33f7fd58-e985-460d-a939-7b9bf745fdae\") " pod="openstack-operators/810ee47ade577678291ae176be4e20f603ed854e1e1c3c96e3f3e8130ehm899" Jan 26 10:58:11 crc kubenswrapper[5003]: I0126 10:58:11.680006 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/33f7fd58-e985-460d-a939-7b9bf745fdae-bundle\") pod \"810ee47ade577678291ae176be4e20f603ed854e1e1c3c96e3f3e8130ehm899\" (UID: \"33f7fd58-e985-460d-a939-7b9bf745fdae\") " pod="openstack-operators/810ee47ade577678291ae176be4e20f603ed854e1e1c3c96e3f3e8130ehm899" Jan 26 10:58:11 crc kubenswrapper[5003]: I0126 10:58:11.680400 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/33f7fd58-e985-460d-a939-7b9bf745fdae-util\") pod \"810ee47ade577678291ae176be4e20f603ed854e1e1c3c96e3f3e8130ehm899\" (UID: \"33f7fd58-e985-460d-a939-7b9bf745fdae\") " pod="openstack-operators/810ee47ade577678291ae176be4e20f603ed854e1e1c3c96e3f3e8130ehm899" Jan 26 10:58:11 crc kubenswrapper[5003]: I0126 10:58:11.680523 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/33f7fd58-e985-460d-a939-7b9bf745fdae-bundle\") pod \"810ee47ade577678291ae176be4e20f603ed854e1e1c3c96e3f3e8130ehm899\" (UID: \"33f7fd58-e985-460d-a939-7b9bf745fdae\") " pod="openstack-operators/810ee47ade577678291ae176be4e20f603ed854e1e1c3c96e3f3e8130ehm899" Jan 26 10:58:11 crc kubenswrapper[5003]: I0126 10:58:11.704756 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nkhxd\" (UniqueName: \"kubernetes.io/projected/33f7fd58-e985-460d-a939-7b9bf745fdae-kube-api-access-nkhxd\") pod \"810ee47ade577678291ae176be4e20f603ed854e1e1c3c96e3f3e8130ehm899\" (UID: \"33f7fd58-e985-460d-a939-7b9bf745fdae\") " pod="openstack-operators/810ee47ade577678291ae176be4e20f603ed854e1e1c3c96e3f3e8130ehm899" Jan 26 10:58:11 crc kubenswrapper[5003]: I0126 10:58:11.866921 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/810ee47ade577678291ae176be4e20f603ed854e1e1c3c96e3f3e8130ehm899" Jan 26 10:58:13 crc kubenswrapper[5003]: I0126 10:58:13.043537 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/810ee47ade577678291ae176be4e20f603ed854e1e1c3c96e3f3e8130ehm899"] Jan 26 10:58:13 crc kubenswrapper[5003]: I0126 10:58:13.761219 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/810ee47ade577678291ae176be4e20f603ed854e1e1c3c96e3f3e8130ehm899" event={"ID":"33f7fd58-e985-460d-a939-7b9bf745fdae","Type":"ContainerStarted","Data":"ae754799293de07c4f63bc92fdb2ac34fc0b90081c96f3dc1f505e465fdc6b30"} Jan 26 10:58:18 crc kubenswrapper[5003]: I0126 10:58:18.468236 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6lx88" Jan 26 10:58:18 crc kubenswrapper[5003]: I0126 10:58:18.798234 5003 generic.go:334] "Generic (PLEG): container finished" podID="33f7fd58-e985-460d-a939-7b9bf745fdae" containerID="ba6491ef7c3ca38b339ce9377796fbbc7e035ffc4e2bac9bf62b5f912f249fb7" exitCode=0 Jan 26 10:58:18 crc kubenswrapper[5003]: I0126 10:58:18.798344 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/810ee47ade577678291ae176be4e20f603ed854e1e1c3c96e3f3e8130ehm899" event={"ID":"33f7fd58-e985-460d-a939-7b9bf745fdae","Type":"ContainerDied","Data":"ba6491ef7c3ca38b339ce9377796fbbc7e035ffc4e2bac9bf62b5f912f249fb7"} Jan 26 10:58:18 crc kubenswrapper[5003]: I0126 10:58:18.894138 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-8x8v6"] Jan 26 10:58:18 crc kubenswrapper[5003]: I0126 10:58:18.895407 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8x8v6" Jan 26 10:58:18 crc kubenswrapper[5003]: I0126 10:58:18.914475 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8x8v6"] Jan 26 10:58:19 crc kubenswrapper[5003]: I0126 10:58:19.085081 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9459bb8b-1d5f-4868-9b32-0bf3491773d1-utilities\") pod \"redhat-operators-8x8v6\" (UID: \"9459bb8b-1d5f-4868-9b32-0bf3491773d1\") " pod="openshift-marketplace/redhat-operators-8x8v6" Jan 26 10:58:19 crc kubenswrapper[5003]: I0126 10:58:19.085294 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvmtv\" (UniqueName: \"kubernetes.io/projected/9459bb8b-1d5f-4868-9b32-0bf3491773d1-kube-api-access-gvmtv\") pod \"redhat-operators-8x8v6\" (UID: \"9459bb8b-1d5f-4868-9b32-0bf3491773d1\") " pod="openshift-marketplace/redhat-operators-8x8v6" Jan 26 10:58:19 crc kubenswrapper[5003]: I0126 10:58:19.085350 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9459bb8b-1d5f-4868-9b32-0bf3491773d1-catalog-content\") pod \"redhat-operators-8x8v6\" (UID: \"9459bb8b-1d5f-4868-9b32-0bf3491773d1\") " pod="openshift-marketplace/redhat-operators-8x8v6" Jan 26 10:58:19 crc kubenswrapper[5003]: I0126 10:58:19.186439 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvmtv\" (UniqueName: \"kubernetes.io/projected/9459bb8b-1d5f-4868-9b32-0bf3491773d1-kube-api-access-gvmtv\") pod \"redhat-operators-8x8v6\" (UID: \"9459bb8b-1d5f-4868-9b32-0bf3491773d1\") " pod="openshift-marketplace/redhat-operators-8x8v6" Jan 26 10:58:19 crc kubenswrapper[5003]: I0126 10:58:19.186500 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9459bb8b-1d5f-4868-9b32-0bf3491773d1-catalog-content\") pod \"redhat-operators-8x8v6\" (UID: \"9459bb8b-1d5f-4868-9b32-0bf3491773d1\") " pod="openshift-marketplace/redhat-operators-8x8v6" Jan 26 10:58:19 crc kubenswrapper[5003]: I0126 10:58:19.186541 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9459bb8b-1d5f-4868-9b32-0bf3491773d1-utilities\") pod \"redhat-operators-8x8v6\" (UID: \"9459bb8b-1d5f-4868-9b32-0bf3491773d1\") " pod="openshift-marketplace/redhat-operators-8x8v6" Jan 26 10:58:19 crc kubenswrapper[5003]: I0126 10:58:19.186930 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9459bb8b-1d5f-4868-9b32-0bf3491773d1-utilities\") pod \"redhat-operators-8x8v6\" (UID: \"9459bb8b-1d5f-4868-9b32-0bf3491773d1\") " pod="openshift-marketplace/redhat-operators-8x8v6" Jan 26 10:58:19 crc kubenswrapper[5003]: I0126 10:58:19.186973 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9459bb8b-1d5f-4868-9b32-0bf3491773d1-catalog-content\") pod \"redhat-operators-8x8v6\" (UID: \"9459bb8b-1d5f-4868-9b32-0bf3491773d1\") " pod="openshift-marketplace/redhat-operators-8x8v6" Jan 26 10:58:19 crc kubenswrapper[5003]: I0126 10:58:19.218012 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-gvmtv\" (UniqueName: \"kubernetes.io/projected/9459bb8b-1d5f-4868-9b32-0bf3491773d1-kube-api-access-gvmtv\") pod \"redhat-operators-8x8v6\" (UID: \"9459bb8b-1d5f-4868-9b32-0bf3491773d1\") " pod="openshift-marketplace/redhat-operators-8x8v6" Jan 26 10:58:19 crc kubenswrapper[5003]: I0126 10:58:19.518143 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8x8v6" Jan 26 10:58:19 crc kubenswrapper[5003]: I0126 10:58:19.806692 5003 generic.go:334] "Generic (PLEG): container finished" podID="33f7fd58-e985-460d-a939-7b9bf745fdae" containerID="c28dd943aa6e48c92d943b57357db4d7a757ee3496e80c6f8afc1826f89555ac" exitCode=0 Jan 26 10:58:19 crc kubenswrapper[5003]: I0126 10:58:19.806861 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/810ee47ade577678291ae176be4e20f603ed854e1e1c3c96e3f3e8130ehm899" event={"ID":"33f7fd58-e985-460d-a939-7b9bf745fdae","Type":"ContainerDied","Data":"c28dd943aa6e48c92d943b57357db4d7a757ee3496e80c6f8afc1826f89555ac"} Jan 26 10:58:19 crc kubenswrapper[5003]: I0126 10:58:19.930573 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8x8v6"] Jan 26 10:58:19 crc kubenswrapper[5003]: W0126 10:58:19.934465 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9459bb8b_1d5f_4868_9b32_0bf3491773d1.slice/crio-f4e6cf7c59dcf218859e7c5cec06eb2be76d768df2574f4edf8b4fc076b092ce WatchSource:0}: Error finding container f4e6cf7c59dcf218859e7c5cec06eb2be76d768df2574f4edf8b4fc076b092ce: Status 404 returned error can't find the container with id f4e6cf7c59dcf218859e7c5cec06eb2be76d768df2574f4edf8b4fc076b092ce Jan 26 10:58:20 crc kubenswrapper[5003]: I0126 10:58:20.317819 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6lx88"] Jan 26 10:58:20 crc kubenswrapper[5003]: I0126 10:58:20.819053 5003 generic.go:334] "Generic (PLEG): container finished" podID="33f7fd58-e985-460d-a939-7b9bf745fdae" containerID="b5b8b7f1b7956b2dff3c4ff61fd4ba573bd8ac81f9d059977cc3d8425eb02601" exitCode=0 Jan 26 10:58:20 crc kubenswrapper[5003]: I0126 10:58:20.819117 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/810ee47ade577678291ae176be4e20f603ed854e1e1c3c96e3f3e8130ehm899" event={"ID":"33f7fd58-e985-460d-a939-7b9bf745fdae","Type":"ContainerDied","Data":"b5b8b7f1b7956b2dff3c4ff61fd4ba573bd8ac81f9d059977cc3d8425eb02601"} Jan 26 10:58:20 crc kubenswrapper[5003]: I0126 10:58:20.820955 5003 generic.go:334] "Generic (PLEG): container finished" podID="9459bb8b-1d5f-4868-9b32-0bf3491773d1" containerID="e2ec5f822485ac209d93a4a6dd3ad734bf6b18fde459ed1a8c3a7f2bf181d05a" exitCode=0 Jan 26 10:58:20 crc kubenswrapper[5003]: I0126 10:58:20.821008 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8x8v6" event={"ID":"9459bb8b-1d5f-4868-9b32-0bf3491773d1","Type":"ContainerDied","Data":"e2ec5f822485ac209d93a4a6dd3ad734bf6b18fde459ed1a8c3a7f2bf181d05a"} Jan 26 10:58:20 crc kubenswrapper[5003]: I0126 10:58:20.821046 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8x8v6" event={"ID":"9459bb8b-1d5f-4868-9b32-0bf3491773d1","Type":"ContainerStarted","Data":"f4e6cf7c59dcf218859e7c5cec06eb2be76d768df2574f4edf8b4fc076b092ce"} Jan 26 10:58:21 crc kubenswrapper[5003]: I0126 10:58:21.085700 5003 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openshift-marketplace/certified-operators-5pkzq"] Jan 26 10:58:21 crc kubenswrapper[5003]: I0126 10:58:21.085981 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5pkzq" podUID="656e647b-438f-442f-bc26-b92b57b3b76e" containerName="registry-server" containerID="cri-o://47d41f0eb68a13937282e5ac2068864b68dbe8ab8126357e59880e66b5dbf67b" gracePeriod=2 Jan 26 10:58:21 crc kubenswrapper[5003]: E0126 10:58:21.139349 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 47d41f0eb68a13937282e5ac2068864b68dbe8ab8126357e59880e66b5dbf67b is running failed: container process not found" containerID="47d41f0eb68a13937282e5ac2068864b68dbe8ab8126357e59880e66b5dbf67b" cmd=["grpc_health_probe","-addr=:50051"] Jan 26 10:58:21 crc kubenswrapper[5003]: E0126 10:58:21.144445 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 47d41f0eb68a13937282e5ac2068864b68dbe8ab8126357e59880e66b5dbf67b is running failed: container process not found" containerID="47d41f0eb68a13937282e5ac2068864b68dbe8ab8126357e59880e66b5dbf67b" cmd=["grpc_health_probe","-addr=:50051"] Jan 26 10:58:21 crc kubenswrapper[5003]: E0126 10:58:21.145178 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 47d41f0eb68a13937282e5ac2068864b68dbe8ab8126357e59880e66b5dbf67b is running failed: container process not found" containerID="47d41f0eb68a13937282e5ac2068864b68dbe8ab8126357e59880e66b5dbf67b" cmd=["grpc_health_probe","-addr=:50051"] Jan 26 10:58:21 crc kubenswrapper[5003]: E0126 10:58:21.145249 5003 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 47d41f0eb68a13937282e5ac2068864b68dbe8ab8126357e59880e66b5dbf67b is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-5pkzq" podUID="656e647b-438f-442f-bc26-b92b57b3b76e" containerName="registry-server" Jan 26 10:58:21 crc kubenswrapper[5003]: I0126 10:58:21.543223 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5pkzq" Jan 26 10:58:21 crc kubenswrapper[5003]: I0126 10:58:21.731724 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g7j4\" (UniqueName: \"kubernetes.io/projected/656e647b-438f-442f-bc26-b92b57b3b76e-kube-api-access-6g7j4\") pod \"656e647b-438f-442f-bc26-b92b57b3b76e\" (UID: \"656e647b-438f-442f-bc26-b92b57b3b76e\") " Jan 26 10:58:21 crc kubenswrapper[5003]: I0126 10:58:21.731857 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/656e647b-438f-442f-bc26-b92b57b3b76e-catalog-content\") pod \"656e647b-438f-442f-bc26-b92b57b3b76e\" (UID: \"656e647b-438f-442f-bc26-b92b57b3b76e\") " Jan 26 10:58:21 crc kubenswrapper[5003]: I0126 10:58:21.731910 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/656e647b-438f-442f-bc26-b92b57b3b76e-utilities\") pod \"656e647b-438f-442f-bc26-b92b57b3b76e\" (UID: \"656e647b-438f-442f-bc26-b92b57b3b76e\") " Jan 26 10:58:21 crc kubenswrapper[5003]: I0126 10:58:21.732787 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/656e647b-438f-442f-bc26-b92b57b3b76e-utilities" (OuterVolumeSpecName: "utilities") pod "656e647b-438f-442f-bc26-b92b57b3b76e" (UID: "656e647b-438f-442f-bc26-b92b57b3b76e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:58:21 crc kubenswrapper[5003]: I0126 10:58:21.744557 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/656e647b-438f-442f-bc26-b92b57b3b76e-kube-api-access-6g7j4" (OuterVolumeSpecName: "kube-api-access-6g7j4") pod "656e647b-438f-442f-bc26-b92b57b3b76e" (UID: "656e647b-438f-442f-bc26-b92b57b3b76e"). InnerVolumeSpecName "kube-api-access-6g7j4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:58:21 crc kubenswrapper[5003]: I0126 10:58:21.783858 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/656e647b-438f-442f-bc26-b92b57b3b76e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "656e647b-438f-442f-bc26-b92b57b3b76e" (UID: "656e647b-438f-442f-bc26-b92b57b3b76e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:58:21 crc kubenswrapper[5003]: I0126 10:58:21.828564 5003 generic.go:334] "Generic (PLEG): container finished" podID="656e647b-438f-442f-bc26-b92b57b3b76e" containerID="47d41f0eb68a13937282e5ac2068864b68dbe8ab8126357e59880e66b5dbf67b" exitCode=0 Jan 26 10:58:21 crc kubenswrapper[5003]: I0126 10:58:21.828636 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5pkzq" event={"ID":"656e647b-438f-442f-bc26-b92b57b3b76e","Type":"ContainerDied","Data":"47d41f0eb68a13937282e5ac2068864b68dbe8ab8126357e59880e66b5dbf67b"} Jan 26 10:58:21 crc kubenswrapper[5003]: I0126 10:58:21.828668 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5pkzq" event={"ID":"656e647b-438f-442f-bc26-b92b57b3b76e","Type":"ContainerDied","Data":"c870f67cd6f4314d6cb0f192fb2d90636fed35a4d04cefac034434fc36f28721"} Jan 26 10:58:21 crc kubenswrapper[5003]: I0126 10:58:21.828639 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5pkzq" Jan 26 10:58:21 crc kubenswrapper[5003]: I0126 10:58:21.828695 5003 scope.go:117] "RemoveContainer" containerID="47d41f0eb68a13937282e5ac2068864b68dbe8ab8126357e59880e66b5dbf67b" Jan 26 10:58:21 crc kubenswrapper[5003]: I0126 10:58:21.830365 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8x8v6" event={"ID":"9459bb8b-1d5f-4868-9b32-0bf3491773d1","Type":"ContainerStarted","Data":"16c3353638600fb38cbbd2aa70e2bcd9bd3547f34f714407cec7e91e6cf3a78f"} Jan 26 10:58:21 crc kubenswrapper[5003]: I0126 10:58:21.835679 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/656e647b-438f-442f-bc26-b92b57b3b76e-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 10:58:21 crc kubenswrapper[5003]: I0126 10:58:21.835933 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/656e647b-438f-442f-bc26-b92b57b3b76e-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 10:58:21 crc kubenswrapper[5003]: I0126 10:58:21.835945 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g7j4\" (UniqueName: \"kubernetes.io/projected/656e647b-438f-442f-bc26-b92b57b3b76e-kube-api-access-6g7j4\") on node \"crc\" DevicePath \"\"" Jan 26 10:58:21 crc kubenswrapper[5003]: I0126 10:58:21.855497 5003 scope.go:117] "RemoveContainer" containerID="a946512d3bca2fe67a2d74d732fc3011db9cc99499cb174b75f32c948ed2afc2" Jan 26 10:58:21 crc kubenswrapper[5003]: I0126 10:58:21.883723 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5pkzq"] Jan 26 10:58:21 crc kubenswrapper[5003]: I0126 10:58:21.895916 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5pkzq"] Jan 26 10:58:21 crc kubenswrapper[5003]: I0126 10:58:21.906964 5003 scope.go:117] "RemoveContainer" containerID="b55b7c385ebf1df285a58a40ba2a0c7567b8a5a8ff18c492b585f8f23d7a4c75" Jan 26 10:58:21 crc kubenswrapper[5003]: I0126 10:58:21.928137 5003 scope.go:117] "RemoveContainer" containerID="47d41f0eb68a13937282e5ac2068864b68dbe8ab8126357e59880e66b5dbf67b" Jan 26 10:58:21 crc kubenswrapper[5003]: E0126 10:58:21.931572 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47d41f0eb68a13937282e5ac2068864b68dbe8ab8126357e59880e66b5dbf67b\": container with ID starting with 47d41f0eb68a13937282e5ac2068864b68dbe8ab8126357e59880e66b5dbf67b not found: ID does not exist" containerID="47d41f0eb68a13937282e5ac2068864b68dbe8ab8126357e59880e66b5dbf67b" Jan 26 10:58:21 crc kubenswrapper[5003]: I0126 10:58:21.931635 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47d41f0eb68a13937282e5ac2068864b68dbe8ab8126357e59880e66b5dbf67b"} err="failed to get container status \"47d41f0eb68a13937282e5ac2068864b68dbe8ab8126357e59880e66b5dbf67b\": rpc error: code = NotFound desc = could not find container \"47d41f0eb68a13937282e5ac2068864b68dbe8ab8126357e59880e66b5dbf67b\": container with ID starting with 47d41f0eb68a13937282e5ac2068864b68dbe8ab8126357e59880e66b5dbf67b not found: ID does not exist" Jan 26 10:58:21 crc kubenswrapper[5003]: I0126 10:58:21.931664 5003 scope.go:117] "RemoveContainer" containerID="a946512d3bca2fe67a2d74d732fc3011db9cc99499cb174b75f32c948ed2afc2" Jan 26 10:58:21 crc kubenswrapper[5003]: 
E0126 10:58:21.932599 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a946512d3bca2fe67a2d74d732fc3011db9cc99499cb174b75f32c948ed2afc2\": container with ID starting with a946512d3bca2fe67a2d74d732fc3011db9cc99499cb174b75f32c948ed2afc2 not found: ID does not exist" containerID="a946512d3bca2fe67a2d74d732fc3011db9cc99499cb174b75f32c948ed2afc2"
Jan 26 10:58:21 crc kubenswrapper[5003]: I0126 10:58:21.932655 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a946512d3bca2fe67a2d74d732fc3011db9cc99499cb174b75f32c948ed2afc2"} err="failed to get container status \"a946512d3bca2fe67a2d74d732fc3011db9cc99499cb174b75f32c948ed2afc2\": rpc error: code = NotFound desc = could not find container \"a946512d3bca2fe67a2d74d732fc3011db9cc99499cb174b75f32c948ed2afc2\": container with ID starting with a946512d3bca2fe67a2d74d732fc3011db9cc99499cb174b75f32c948ed2afc2 not found: ID does not exist"
Jan 26 10:58:21 crc kubenswrapper[5003]: I0126 10:58:21.932687 5003 scope.go:117] "RemoveContainer" containerID="b55b7c385ebf1df285a58a40ba2a0c7567b8a5a8ff18c492b585f8f23d7a4c75"
Jan 26 10:58:21 crc kubenswrapper[5003]: E0126 10:58:21.933169 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b55b7c385ebf1df285a58a40ba2a0c7567b8a5a8ff18c492b585f8f23d7a4c75\": container with ID starting with b55b7c385ebf1df285a58a40ba2a0c7567b8a5a8ff18c492b585f8f23d7a4c75 not found: ID does not exist" containerID="b55b7c385ebf1df285a58a40ba2a0c7567b8a5a8ff18c492b585f8f23d7a4c75"
Jan 26 10:58:21 crc kubenswrapper[5003]: I0126 10:58:21.933229 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b55b7c385ebf1df285a58a40ba2a0c7567b8a5a8ff18c492b585f8f23d7a4c75"} err="failed to get container status \"b55b7c385ebf1df285a58a40ba2a0c7567b8a5a8ff18c492b585f8f23d7a4c75\": rpc error: code = NotFound desc = could not find container \"b55b7c385ebf1df285a58a40ba2a0c7567b8a5a8ff18c492b585f8f23d7a4c75\": container with ID starting with b55b7c385ebf1df285a58a40ba2a0c7567b8a5a8ff18c492b585f8f23d7a4c75 not found: ID does not exist"
Jan 26 10:58:22 crc kubenswrapper[5003]: I0126 10:58:22.089412 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/810ee47ade577678291ae176be4e20f603ed854e1e1c3c96e3f3e8130ehm899"
Jan 26 10:58:22 crc kubenswrapper[5003]: I0126 10:58:22.240131 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nkhxd\" (UniqueName: \"kubernetes.io/projected/33f7fd58-e985-460d-a939-7b9bf745fdae-kube-api-access-nkhxd\") pod \"33f7fd58-e985-460d-a939-7b9bf745fdae\" (UID: \"33f7fd58-e985-460d-a939-7b9bf745fdae\") "
Jan 26 10:58:22 crc kubenswrapper[5003]: I0126 10:58:22.240235 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/33f7fd58-e985-460d-a939-7b9bf745fdae-util\") pod \"33f7fd58-e985-460d-a939-7b9bf745fdae\" (UID: \"33f7fd58-e985-460d-a939-7b9bf745fdae\") "
Jan 26 10:58:22 crc kubenswrapper[5003]: I0126 10:58:22.240344 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/33f7fd58-e985-460d-a939-7b9bf745fdae-bundle\") pod \"33f7fd58-e985-460d-a939-7b9bf745fdae\" (UID: \"33f7fd58-e985-460d-a939-7b9bf745fdae\") "
Jan 26 10:58:22 crc kubenswrapper[5003]: I0126 10:58:22.241334 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/33f7fd58-e985-460d-a939-7b9bf745fdae-bundle" (OuterVolumeSpecName: "bundle") pod "33f7fd58-e985-460d-a939-7b9bf745fdae" (UID: "33f7fd58-e985-460d-a939-7b9bf745fdae"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 10:58:22 crc kubenswrapper[5003]: I0126 10:58:22.244661 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33f7fd58-e985-460d-a939-7b9bf745fdae-kube-api-access-nkhxd" (OuterVolumeSpecName: "kube-api-access-nkhxd") pod "33f7fd58-e985-460d-a939-7b9bf745fdae" (UID: "33f7fd58-e985-460d-a939-7b9bf745fdae"). InnerVolumeSpecName "kube-api-access-nkhxd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 10:58:22 crc kubenswrapper[5003]: I0126 10:58:22.263709 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/33f7fd58-e985-460d-a939-7b9bf745fdae-util" (OuterVolumeSpecName: "util") pod "33f7fd58-e985-460d-a939-7b9bf745fdae" (UID: "33f7fd58-e985-460d-a939-7b9bf745fdae"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 10:58:22 crc kubenswrapper[5003]: I0126 10:58:22.342218 5003 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/33f7fd58-e985-460d-a939-7b9bf745fdae-bundle\") on node \"crc\" DevicePath \"\""
Jan 26 10:58:22 crc kubenswrapper[5003]: I0126 10:58:22.342256 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nkhxd\" (UniqueName: \"kubernetes.io/projected/33f7fd58-e985-460d-a939-7b9bf745fdae-kube-api-access-nkhxd\") on node \"crc\" DevicePath \"\""
Jan 26 10:58:22 crc kubenswrapper[5003]: I0126 10:58:22.342268 5003 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/33f7fd58-e985-460d-a939-7b9bf745fdae-util\") on node \"crc\" DevicePath \"\""
Jan 26 10:58:22 crc kubenswrapper[5003]: I0126 10:58:22.837033 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/810ee47ade577678291ae176be4e20f603ed854e1e1c3c96e3f3e8130ehm899" event={"ID":"33f7fd58-e985-460d-a939-7b9bf745fdae","Type":"ContainerDied","Data":"ae754799293de07c4f63bc92fdb2ac34fc0b90081c96f3dc1f505e465fdc6b30"}
Jan 26 10:58:22 crc kubenswrapper[5003]: I0126 10:58:22.837079 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/810ee47ade577678291ae176be4e20f603ed854e1e1c3c96e3f3e8130ehm899"
Jan 26 10:58:22 crc kubenswrapper[5003]: I0126 10:58:22.837082 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ae754799293de07c4f63bc92fdb2ac34fc0b90081c96f3dc1f505e465fdc6b30"
Jan 26 10:58:22 crc kubenswrapper[5003]: I0126 10:58:22.838613 5003 generic.go:334] "Generic (PLEG): container finished" podID="9459bb8b-1d5f-4868-9b32-0bf3491773d1" containerID="16c3353638600fb38cbbd2aa70e2bcd9bd3547f34f714407cec7e91e6cf3a78f" exitCode=0
Jan 26 10:58:22 crc kubenswrapper[5003]: I0126 10:58:22.838693 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8x8v6" event={"ID":"9459bb8b-1d5f-4868-9b32-0bf3491773d1","Type":"ContainerDied","Data":"16c3353638600fb38cbbd2aa70e2bcd9bd3547f34f714407cec7e91e6cf3a78f"}
Jan 26 10:58:23 crc kubenswrapper[5003]: I0126 10:58:23.009507 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="656e647b-438f-442f-bc26-b92b57b3b76e" path="/var/lib/kubelet/pods/656e647b-438f-442f-bc26-b92b57b3b76e/volumes"
Jan 26 10:58:23 crc kubenswrapper[5003]: I0126 10:58:23.690580 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-thsgm"]
Jan 26 10:58:23 crc kubenswrapper[5003]: E0126 10:58:23.691410 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="656e647b-438f-442f-bc26-b92b57b3b76e" containerName="registry-server"
Jan 26 10:58:23 crc kubenswrapper[5003]: I0126 10:58:23.691507 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="656e647b-438f-442f-bc26-b92b57b3b76e" containerName="registry-server"
Jan 26 10:58:23 crc kubenswrapper[5003]: E0126 10:58:23.691585 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33f7fd58-e985-460d-a939-7b9bf745fdae" containerName="util"
Jan 26 10:58:23 crc kubenswrapper[5003]: I0126 10:58:23.691657 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="33f7fd58-e985-460d-a939-7b9bf745fdae" containerName="util"
Jan 26 10:58:23 crc kubenswrapper[5003]: E0126 10:58:23.691732 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33f7fd58-e985-460d-a939-7b9bf745fdae" containerName="pull"
Jan 26 10:58:23 crc kubenswrapper[5003]: I0126 10:58:23.691800 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="33f7fd58-e985-460d-a939-7b9bf745fdae" containerName="pull"
Jan 26 10:58:23 crc kubenswrapper[5003]: E0126 10:58:23.691888 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="656e647b-438f-442f-bc26-b92b57b3b76e" containerName="extract-utilities"
Jan 26 10:58:23 crc kubenswrapper[5003]: I0126 10:58:23.691963 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="656e647b-438f-442f-bc26-b92b57b3b76e" containerName="extract-utilities"
Jan 26 10:58:23 crc kubenswrapper[5003]: E0126 10:58:23.692041 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="656e647b-438f-442f-bc26-b92b57b3b76e" containerName="extract-content"
Jan 26 10:58:23 crc kubenswrapper[5003]: I0126 10:58:23.692114 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="656e647b-438f-442f-bc26-b92b57b3b76e" containerName="extract-content"
Jan 26 10:58:23 crc kubenswrapper[5003]: E0126 10:58:23.692195 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33f7fd58-e985-460d-a939-7b9bf745fdae" containerName="extract"
Jan 26 10:58:23 crc kubenswrapper[5003]: I0126 10:58:23.692261 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="33f7fd58-e985-460d-a939-7b9bf745fdae" containerName="extract"
Jan 26 10:58:23 crc kubenswrapper[5003]: I0126 10:58:23.692477 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="33f7fd58-e985-460d-a939-7b9bf745fdae" containerName="extract"
Jan 26 10:58:23 crc kubenswrapper[5003]: I0126 10:58:23.692577 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="656e647b-438f-442f-bc26-b92b57b3b76e" containerName="registry-server"
Jan 26 10:58:23 crc kubenswrapper[5003]: I0126 10:58:23.693706 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-thsgm"
Jan 26 10:58:23 crc kubenswrapper[5003]: I0126 10:58:23.713736 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-thsgm"]
Jan 26 10:58:23 crc kubenswrapper[5003]: I0126 10:58:23.846937 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8x8v6" event={"ID":"9459bb8b-1d5f-4868-9b32-0bf3491773d1","Type":"ContainerStarted","Data":"0464d91a29dc2060b11b703b16663b88612245dc2a6e648a669307246854db15"}
Jan 26 10:58:23 crc kubenswrapper[5003]: I0126 10:58:23.862515 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9hgxc\" (UniqueName: \"kubernetes.io/projected/a2e56acc-9065-4b0f-8b45-2549a989e156-kube-api-access-9hgxc\") pod \"community-operators-thsgm\" (UID: \"a2e56acc-9065-4b0f-8b45-2549a989e156\") " pod="openshift-marketplace/community-operators-thsgm"
Jan 26 10:58:23 crc kubenswrapper[5003]: I0126 10:58:23.862787 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2e56acc-9065-4b0f-8b45-2549a989e156-utilities\") pod \"community-operators-thsgm\" (UID: \"a2e56acc-9065-4b0f-8b45-2549a989e156\") " pod="openshift-marketplace/community-operators-thsgm"
Jan 26 10:58:23 crc kubenswrapper[5003]: I0126 10:58:23.862928 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2e56acc-9065-4b0f-8b45-2549a989e156-catalog-content\") pod \"community-operators-thsgm\" (UID: \"a2e56acc-9065-4b0f-8b45-2549a989e156\") " pod="openshift-marketplace/community-operators-thsgm"
Jan 26 10:58:23 crc kubenswrapper[5003]: I0126 10:58:23.873055 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-8x8v6" podStartSLOduration=3.449245864 podStartE2EDuration="5.873036622s" podCreationTimestamp="2026-01-26 10:58:18 +0000 UTC" firstStartedPulling="2026-01-26 10:58:20.822997854 +0000 UTC m=+916.364223425" lastFinishedPulling="2026-01-26 10:58:23.246788612 +0000 UTC m=+918.788014183" observedRunningTime="2026-01-26 10:58:23.866358552 +0000 UTC m=+919.407584133" watchObservedRunningTime="2026-01-26 10:58:23.873036622 +0000 UTC m=+919.414262173"
Jan 26 10:58:23 crc kubenswrapper[5003]: I0126 10:58:23.964212 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9hgxc\" (UniqueName: \"kubernetes.io/projected/a2e56acc-9065-4b0f-8b45-2549a989e156-kube-api-access-9hgxc\") pod \"community-operators-thsgm\" (UID: \"a2e56acc-9065-4b0f-8b45-2549a989e156\") " pod="openshift-marketplace/community-operators-thsgm"
Jan 26 10:58:23 crc kubenswrapper[5003]: I0126 10:58:23.964474 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2e56acc-9065-4b0f-8b45-2549a989e156-utilities\") pod \"community-operators-thsgm\" (UID: \"a2e56acc-9065-4b0f-8b45-2549a989e156\") " pod="openshift-marketplace/community-operators-thsgm"
Jan 26 10:58:23 crc kubenswrapper[5003]: I0126 10:58:23.964634 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2e56acc-9065-4b0f-8b45-2549a989e156-catalog-content\") pod \"community-operators-thsgm\" (UID: \"a2e56acc-9065-4b0f-8b45-2549a989e156\") " pod="openshift-marketplace/community-operators-thsgm"
Jan 26 10:58:23 crc kubenswrapper[5003]: I0126 10:58:23.965008 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2e56acc-9065-4b0f-8b45-2549a989e156-utilities\") pod \"community-operators-thsgm\" (UID: \"a2e56acc-9065-4b0f-8b45-2549a989e156\") " pod="openshift-marketplace/community-operators-thsgm"
Jan 26 10:58:23 crc kubenswrapper[5003]: I0126 10:58:23.965068 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2e56acc-9065-4b0f-8b45-2549a989e156-catalog-content\") pod \"community-operators-thsgm\" (UID: \"a2e56acc-9065-4b0f-8b45-2549a989e156\") " pod="openshift-marketplace/community-operators-thsgm"
Jan 26 10:58:23 crc kubenswrapper[5003]: I0126 10:58:23.982431 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9hgxc\" (UniqueName: \"kubernetes.io/projected/a2e56acc-9065-4b0f-8b45-2549a989e156-kube-api-access-9hgxc\") pod \"community-operators-thsgm\" (UID: \"a2e56acc-9065-4b0f-8b45-2549a989e156\") " pod="openshift-marketplace/community-operators-thsgm"
Jan 26 10:58:24 crc kubenswrapper[5003]: I0126 10:58:24.009308 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-thsgm"
Jan 26 10:58:24 crc kubenswrapper[5003]: I0126 10:58:24.460432 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-thsgm"]
Jan 26 10:58:24 crc kubenswrapper[5003]: W0126 10:58:24.471474 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda2e56acc_9065_4b0f_8b45_2549a989e156.slice/crio-b22c2dd4764e12e5ef43b9ab8208f4872f3c2401acad42f2ed6b1b36cb9d7d06 WatchSource:0}: Error finding container b22c2dd4764e12e5ef43b9ab8208f4872f3c2401acad42f2ed6b1b36cb9d7d06: Status 404 returned error can't find the container with id b22c2dd4764e12e5ef43b9ab8208f4872f3c2401acad42f2ed6b1b36cb9d7d06
Jan 26 10:58:24 crc kubenswrapper[5003]: I0126 10:58:24.854652 5003 generic.go:334] "Generic (PLEG): container finished" podID="a2e56acc-9065-4b0f-8b45-2549a989e156" containerID="378ace15b9f1315d19662950ab0b323b0dd3a36b8a40f10e3010709696067855" exitCode=0
Jan 26 10:58:24 crc kubenswrapper[5003]: I0126 10:58:24.854736 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-thsgm" event={"ID":"a2e56acc-9065-4b0f-8b45-2549a989e156","Type":"ContainerDied","Data":"378ace15b9f1315d19662950ab0b323b0dd3a36b8a40f10e3010709696067855"}
Jan 26 10:58:24 crc kubenswrapper[5003]: I0126 10:58:24.854961 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-thsgm" event={"ID":"a2e56acc-9065-4b0f-8b45-2549a989e156","Type":"ContainerStarted","Data":"b22c2dd4764e12e5ef43b9ab8208f4872f3c2401acad42f2ed6b1b36cb9d7d06"}
Jan 26 10:58:25 crc kubenswrapper[5003]: I0126 10:58:25.865816 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-thsgm" event={"ID":"a2e56acc-9065-4b0f-8b45-2549a989e156","Type":"ContainerStarted","Data":"1ebf9b118c81c2587db07df17bcf0c483bf296e3137d12cd729252f449f66b98"}
Jan 26 10:58:26 crc kubenswrapper[5003]: I0126 10:58:26.873773 5003 generic.go:334] "Generic (PLEG): container finished" podID="a2e56acc-9065-4b0f-8b45-2549a989e156" containerID="1ebf9b118c81c2587db07df17bcf0c483bf296e3137d12cd729252f449f66b98" exitCode=0
Jan 26 10:58:26 crc kubenswrapper[5003]: I0126 10:58:26.873883 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-thsgm" event={"ID":"a2e56acc-9065-4b0f-8b45-2549a989e156","Type":"ContainerDied","Data":"1ebf9b118c81c2587db07df17bcf0c483bf296e3137d12cd729252f449f66b98"}
Jan 26 10:58:28 crc kubenswrapper[5003]: I0126 10:58:28.087607 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-57zgr"]
Jan 26 10:58:28 crc kubenswrapper[5003]: I0126 10:58:28.089195 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-57zgr"
Jan 26 10:58:28 crc kubenswrapper[5003]: I0126 10:58:28.096368 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-57zgr"]
Jan 26 10:58:28 crc kubenswrapper[5003]: I0126 10:58:28.222762 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1a64c88-6efd-429e-af0c-989e00daf4fe-utilities\") pod \"redhat-marketplace-57zgr\" (UID: \"c1a64c88-6efd-429e-af0c-989e00daf4fe\") " pod="openshift-marketplace/redhat-marketplace-57zgr"
Jan 26 10:58:28 crc kubenswrapper[5003]: I0126 10:58:28.223083 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1a64c88-6efd-429e-af0c-989e00daf4fe-catalog-content\") pod \"redhat-marketplace-57zgr\" (UID: \"c1a64c88-6efd-429e-af0c-989e00daf4fe\") " pod="openshift-marketplace/redhat-marketplace-57zgr"
Jan 26 10:58:28 crc kubenswrapper[5003]: I0126 10:58:28.223319 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brkw7\" (UniqueName: \"kubernetes.io/projected/c1a64c88-6efd-429e-af0c-989e00daf4fe-kube-api-access-brkw7\") pod \"redhat-marketplace-57zgr\" (UID: \"c1a64c88-6efd-429e-af0c-989e00daf4fe\") " pod="openshift-marketplace/redhat-marketplace-57zgr"
Jan 26 10:58:28 crc kubenswrapper[5003]: I0126 10:58:28.324834 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1a64c88-6efd-429e-af0c-989e00daf4fe-utilities\") pod \"redhat-marketplace-57zgr\" (UID: \"c1a64c88-6efd-429e-af0c-989e00daf4fe\") " pod="openshift-marketplace/redhat-marketplace-57zgr"
Jan 26 10:58:28 crc kubenswrapper[5003]: I0126 10:58:28.324947 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1a64c88-6efd-429e-af0c-989e00daf4fe-catalog-content\") pod \"redhat-marketplace-57zgr\" (UID: \"c1a64c88-6efd-429e-af0c-989e00daf4fe\") " pod="openshift-marketplace/redhat-marketplace-57zgr"
Jan 26 10:58:28 crc kubenswrapper[5003]: I0126 10:58:28.325081 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brkw7\" (UniqueName: \"kubernetes.io/projected/c1a64c88-6efd-429e-af0c-989e00daf4fe-kube-api-access-brkw7\") pod \"redhat-marketplace-57zgr\" (UID: \"c1a64c88-6efd-429e-af0c-989e00daf4fe\") " pod="openshift-marketplace/redhat-marketplace-57zgr"
Jan 26 10:58:28 crc kubenswrapper[5003]: I0126 10:58:28.325474 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1a64c88-6efd-429e-af0c-989e00daf4fe-utilities\") pod \"redhat-marketplace-57zgr\" (UID: \"c1a64c88-6efd-429e-af0c-989e00daf4fe\") " pod="openshift-marketplace/redhat-marketplace-57zgr"
Jan 26 10:58:28 crc kubenswrapper[5003]: I0126 10:58:28.325529 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1a64c88-6efd-429e-af0c-989e00daf4fe-catalog-content\") pod \"redhat-marketplace-57zgr\" (UID: \"c1a64c88-6efd-429e-af0c-989e00daf4fe\") " pod="openshift-marketplace/redhat-marketplace-57zgr"
Jan 26 10:58:28 crc kubenswrapper[5003]: I0126 10:58:28.343932 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brkw7\" (UniqueName: \"kubernetes.io/projected/c1a64c88-6efd-429e-af0c-989e00daf4fe-kube-api-access-brkw7\") pod \"redhat-marketplace-57zgr\" (UID: \"c1a64c88-6efd-429e-af0c-989e00daf4fe\") " pod="openshift-marketplace/redhat-marketplace-57zgr"
Jan 26 10:58:28 crc kubenswrapper[5003]: I0126 10:58:28.404134 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-57zgr"
Jan 26 10:58:29 crc kubenswrapper[5003]: I0126 10:58:29.520411 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-8x8v6"
Jan 26 10:58:29 crc kubenswrapper[5003]: I0126 10:58:29.521447 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-8x8v6"
Jan 26 10:58:29 crc kubenswrapper[5003]: I0126 10:58:29.551229 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-57zgr"]
Jan 26 10:58:29 crc kubenswrapper[5003]: W0126 10:58:29.579769 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc1a64c88_6efd_429e_af0c_989e00daf4fe.slice/crio-357cc79d0b903c1a124f51e374547597626b83f4a5e5dd996932dc6f7a529e89 WatchSource:0}: Error finding container 357cc79d0b903c1a124f51e374547597626b83f4a5e5dd996932dc6f7a529e89: Status 404 returned error can't find the container with id 357cc79d0b903c1a124f51e374547597626b83f4a5e5dd996932dc6f7a529e89
Jan 26 10:58:29 crc kubenswrapper[5003]: I0126 10:58:29.907418 5003 generic.go:334] "Generic (PLEG): container finished" podID="c1a64c88-6efd-429e-af0c-989e00daf4fe" containerID="756fa6f6382d4f591ff855afbd279febab4f28b3f166859c6b4a068879b71185" exitCode=0
Jan 26 10:58:29 crc kubenswrapper[5003]: I0126 10:58:29.907481 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-57zgr" event={"ID":"c1a64c88-6efd-429e-af0c-989e00daf4fe","Type":"ContainerDied","Data":"756fa6f6382d4f591ff855afbd279febab4f28b3f166859c6b4a068879b71185"}
Jan 26 10:58:29 crc kubenswrapper[5003]: I0126 10:58:29.907542 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-57zgr" event={"ID":"c1a64c88-6efd-429e-af0c-989e00daf4fe","Type":"ContainerStarted","Data":"357cc79d0b903c1a124f51e374547597626b83f4a5e5dd996932dc6f7a529e89"}
Jan 26 10:58:29 crc kubenswrapper[5003]: I0126 10:58:29.909185 5003 generic.go:334] "Generic (PLEG): container finished" podID="0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2" containerID="aea8ef2f11deb741e9e98d6b61f2796f1a8a03a5993b75f8332894748caf5489" exitCode=0
Jan 26 10:58:29 crc kubenswrapper[5003]: I0126 10:58:29.909293 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/rabbitmq-server-0" event={"ID":"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2","Type":"ContainerDied","Data":"aea8ef2f11deb741e9e98d6b61f2796f1a8a03a5993b75f8332894748caf5489"}
Jan 26 10:58:29 crc kubenswrapper[5003]: I0126 10:58:29.912713 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-thsgm" event={"ID":"a2e56acc-9065-4b0f-8b45-2549a989e156","Type":"ContainerStarted","Data":"4aa55387dfae1dc4652435b358f9ace601da1fed59baf76cf838f8caf1990263"}
Jan 26 10:58:30 crc kubenswrapper[5003]: I0126 10:58:30.013818 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-thsgm" podStartSLOduration=2.440376096 podStartE2EDuration="7.013793937s" podCreationTimestamp="2026-01-26 10:58:23 +0000 UTC" firstStartedPulling="2026-01-26 10:58:24.855975078 +0000 UTC m=+920.397200639" lastFinishedPulling="2026-01-26 10:58:29.429392919 +0000 UTC m=+924.970618480" observedRunningTime="2026-01-26 10:58:30.008535528 +0000 UTC m=+925.549761089" watchObservedRunningTime="2026-01-26 10:58:30.013793937 +0000 UTC m=+925.555019498"
Jan 26 10:58:30 crc kubenswrapper[5003]: I0126 10:58:30.607850 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-8x8v6" podUID="9459bb8b-1d5f-4868-9b32-0bf3491773d1" containerName="registry-server" probeResult="failure" output=<
Jan 26 10:58:30 crc kubenswrapper[5003]: timeout: failed to connect service ":50051" within 1s
Jan 26 10:58:30 crc kubenswrapper[5003]: >
Jan 26 10:58:30 crc kubenswrapper[5003]: I0126 10:58:30.925202 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/rabbitmq-server-0" event={"ID":"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2","Type":"ContainerStarted","Data":"cb64f285d792c261060fd6ddf7d2f6c8d187e1c4445c755d57bb1da01ddfc111"}
Jan 26 10:58:30 crc kubenswrapper[5003]: I0126 10:58:30.926375 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/rabbitmq-server-0"
Jan 26 10:58:30 crc kubenswrapper[5003]: I0126 10:58:30.927832 5003 generic.go:334] "Generic (PLEG): container finished" podID="c1a64c88-6efd-429e-af0c-989e00daf4fe" containerID="9c3e591638344764d1bc1c86c4559c12683b5c7301674976227fe0912b18276f" exitCode=0
Jan 26 10:58:30 crc kubenswrapper[5003]: I0126 10:58:30.928626 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-57zgr" event={"ID":"c1a64c88-6efd-429e-af0c-989e00daf4fe","Type":"ContainerDied","Data":"9c3e591638344764d1bc1c86c4559c12683b5c7301674976227fe0912b18276f"}
Jan 26 10:58:30 crc kubenswrapper[5003]: I0126 10:58:30.963848 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/rabbitmq-server-0" podStartSLOduration=37.362255422 podStartE2EDuration="42.963828736s" podCreationTimestamp="2026-01-26 10:57:48 +0000 UTC" firstStartedPulling="2026-01-26 10:57:50.152084754 +0000 UTC m=+885.693310315" lastFinishedPulling="2026-01-26 10:57:55.753658048 +0000 UTC m=+891.294883629" observedRunningTime="2026-01-26 10:58:30.958962908 +0000 UTC m=+926.500188469" watchObservedRunningTime="2026-01-26 10:58:30.963828736 +0000 UTC m=+926.505054297"
Jan 26 10:58:34 crc kubenswrapper[5003]: I0126 10:58:34.010161 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-thsgm"
Jan 26 10:58:34 crc kubenswrapper[5003]: I0126 10:58:34.010457 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-thsgm"
Jan 26 10:58:34 crc kubenswrapper[5003]: I0126 10:58:34.048705 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-thsgm"
Jan 26 10:58:34 crc kubenswrapper[5003]: I0126 10:58:34.988160 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-thsgm"
Jan 26 10:58:36 crc kubenswrapper[5003]: I0126 10:58:36.285012 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-55585fc49f-hpp2z"]
Jan 26 10:58:36 crc kubenswrapper[5003]: I0126 10:58:36.286049 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-55585fc49f-hpp2z"
Jan 26 10:58:36 crc kubenswrapper[5003]: I0126 10:58:36.287742 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-service-cert"
Jan 26 10:58:36 crc kubenswrapper[5003]: I0126 10:58:36.288022 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-tbb4b"
Jan 26 10:58:36 crc kubenswrapper[5003]: I0126 10:58:36.298801 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-55585fc49f-hpp2z"]
Jan 26 10:58:36 crc kubenswrapper[5003]: I0126 10:58:36.364223 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2pvp\" (UniqueName: \"kubernetes.io/projected/9587e58d-66ff-4a24-8373-58c7d6946575-kube-api-access-w2pvp\") pod \"keystone-operator-controller-manager-55585fc49f-hpp2z\" (UID: \"9587e58d-66ff-4a24-8373-58c7d6946575\") " pod="openstack-operators/keystone-operator-controller-manager-55585fc49f-hpp2z"
Jan 26 10:58:36 crc kubenswrapper[5003]: I0126 10:58:36.364609 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9587e58d-66ff-4a24-8373-58c7d6946575-apiservice-cert\") pod \"keystone-operator-controller-manager-55585fc49f-hpp2z\" (UID: \"9587e58d-66ff-4a24-8373-58c7d6946575\") " pod="openstack-operators/keystone-operator-controller-manager-55585fc49f-hpp2z"
Jan 26 10:58:36 crc kubenswrapper[5003]: I0126 10:58:36.364671 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9587e58d-66ff-4a24-8373-58c7d6946575-webhook-cert\") pod \"keystone-operator-controller-manager-55585fc49f-hpp2z\" (UID: \"9587e58d-66ff-4a24-8373-58c7d6946575\") " pod="openstack-operators/keystone-operator-controller-manager-55585fc49f-hpp2z"
Jan 26 10:58:36 crc kubenswrapper[5003]: I0126 10:58:36.465877 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2pvp\" (UniqueName: \"kubernetes.io/projected/9587e58d-66ff-4a24-8373-58c7d6946575-kube-api-access-w2pvp\") pod \"keystone-operator-controller-manager-55585fc49f-hpp2z\" (UID: \"9587e58d-66ff-4a24-8373-58c7d6946575\") " pod="openstack-operators/keystone-operator-controller-manager-55585fc49f-hpp2z"
Jan 26 10:58:36 crc kubenswrapper[5003]: I0126 10:58:36.466322 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9587e58d-66ff-4a24-8373-58c7d6946575-apiservice-cert\") pod \"keystone-operator-controller-manager-55585fc49f-hpp2z\" (UID: \"9587e58d-66ff-4a24-8373-58c7d6946575\") " pod="openstack-operators/keystone-operator-controller-manager-55585fc49f-hpp2z"
Jan 26 10:58:36 crc kubenswrapper[5003]: I0126 10:58:36.466589 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9587e58d-66ff-4a24-8373-58c7d6946575-webhook-cert\") pod \"keystone-operator-controller-manager-55585fc49f-hpp2z\" (UID: \"9587e58d-66ff-4a24-8373-58c7d6946575\") " pod="openstack-operators/keystone-operator-controller-manager-55585fc49f-hpp2z"
Jan 26 10:58:36 crc kubenswrapper[5003]: I0126 10:58:36.475320 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9587e58d-66ff-4a24-8373-58c7d6946575-apiservice-cert\") pod \"keystone-operator-controller-manager-55585fc49f-hpp2z\" (UID: \"9587e58d-66ff-4a24-8373-58c7d6946575\") " pod="openstack-operators/keystone-operator-controller-manager-55585fc49f-hpp2z"
Jan 26 10:58:36 crc kubenswrapper[5003]: I0126 10:58:36.482123 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9587e58d-66ff-4a24-8373-58c7d6946575-webhook-cert\") pod \"keystone-operator-controller-manager-55585fc49f-hpp2z\" (UID: \"9587e58d-66ff-4a24-8373-58c7d6946575\") " pod="openstack-operators/keystone-operator-controller-manager-55585fc49f-hpp2z"
Jan 26 10:58:36 crc kubenswrapper[5003]: I0126 10:58:36.495664 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2pvp\" (UniqueName: \"kubernetes.io/projected/9587e58d-66ff-4a24-8373-58c7d6946575-kube-api-access-w2pvp\") pod \"keystone-operator-controller-manager-55585fc49f-hpp2z\" (UID: \"9587e58d-66ff-4a24-8373-58c7d6946575\") " pod="openstack-operators/keystone-operator-controller-manager-55585fc49f-hpp2z"
Jan 26 10:58:36 crc kubenswrapper[5003]: I0126 10:58:36.604247 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-55585fc49f-hpp2z"
Jan 26 10:58:39 crc kubenswrapper[5003]: I0126 10:58:39.040615 5003 patch_prober.go:28] interesting pod/machine-config-daemon-m84kp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 26 10:58:39 crc kubenswrapper[5003]: I0126 10:58:39.040944 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 26 10:58:39 crc kubenswrapper[5003]: I0126 10:58:39.561762 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-8x8v6"
Jan 26 10:58:39 crc kubenswrapper[5003]: W0126 10:58:39.593566 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9587e58d_66ff_4a24_8373_58c7d6946575.slice/crio-2f29a578dce0531e6470202e3db1578b6d6bf0c5634ab34fb4b178f6e0d4e632 WatchSource:0}: Error finding container 2f29a578dce0531e6470202e3db1578b6d6bf0c5634ab34fb4b178f6e0d4e632: Status 404 returned error can't find the container with id 2f29a578dce0531e6470202e3db1578b6d6bf0c5634ab34fb4b178f6e0d4e632
Jan 26 10:58:39 crc kubenswrapper[5003]: I0126 10:58:39.599234 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-55585fc49f-hpp2z"]
Jan 26 10:58:39 crc kubenswrapper[5003]: I0126 10:58:39.610154 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-8x8v6"
Jan 26 10:58:39 crc kubenswrapper[5003]: I0126 10:58:39.984159 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-55585fc49f-hpp2z" event={"ID":"9587e58d-66ff-4a24-8373-58c7d6946575","Type":"ContainerStarted","Data":"2f29a578dce0531e6470202e3db1578b6d6bf0c5634ab34fb4b178f6e0d4e632"}
Jan 26 10:58:39 crc kubenswrapper[5003]: I0126 10:58:39.986666 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-57zgr" event={"ID":"c1a64c88-6efd-429e-af0c-989e00daf4fe","Type":"ContainerStarted","Data":"eb1a58b5a9feb9990b03c7ddbe52ff7eebee1b3fcead258fa18b15a849cf53ee"}
Jan 26 10:58:40 crc kubenswrapper[5003]: I0126 10:58:40.024967 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-57zgr" podStartSLOduration=2.766655912 podStartE2EDuration="12.024942949s" podCreationTimestamp="2026-01-26 10:58:28 +0000 UTC" firstStartedPulling="2026-01-26 10:58:29.909336463 +0000 UTC m=+925.450562024" lastFinishedPulling="2026-01-26 10:58:39.1676235 +0000 UTC m=+934.708849061" observedRunningTime="2026-01-26 10:58:40.005587238 +0000 UTC m=+935.546812809" watchObservedRunningTime="2026-01-26 10:58:40.024942949 +0000 UTC m=+935.566168510"
Jan 26 10:58:42 crc kubenswrapper[5003]: I0126 10:58:42.884709 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-thsgm"]
Jan 26 10:58:42 crc kubenswrapper[5003]: I0126 10:58:42.884938 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-thsgm" podUID="a2e56acc-9065-4b0f-8b45-2549a989e156" containerName="registry-server" containerID="cri-o://4aa55387dfae1dc4652435b358f9ace601da1fed59baf76cf838f8caf1990263" gracePeriod=2
Jan 26 10:58:44 crc kubenswrapper[5003]: E0126 10:58:44.012425 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4aa55387dfae1dc4652435b358f9ace601da1fed59baf76cf838f8caf1990263" cmd=["grpc_health_probe","-addr=:50051"]
Jan 26 10:58:44 crc kubenswrapper[5003]: E0126 10:58:44.014379 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4aa55387dfae1dc4652435b358f9ace601da1fed59baf76cf838f8caf1990263" cmd=["grpc_health_probe","-addr=:50051"]
Jan 26 10:58:44 crc kubenswrapper[5003]: E0126 10:58:44.015556 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4aa55387dfae1dc4652435b358f9ace601da1fed59baf76cf838f8caf1990263" cmd=["grpc_health_probe","-addr=:50051"]
Jan 26 10:58:44 crc kubenswrapper[5003]: E0126 10:58:44.015585 5003 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-marketplace/community-operators-thsgm" podUID="a2e56acc-9065-4b0f-8b45-2549a989e156" containerName="registry-server"
Jan 26 10:58:44 crc kubenswrapper[5003]: I0126 10:58:44.281884 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8x8v6"]
Jan 26 10:58:44 crc kubenswrapper[5003]: I0126 10:58:44.282148 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-8x8v6" podUID="9459bb8b-1d5f-4868-9b32-0bf3491773d1" containerName="registry-server" containerID="cri-o://0464d91a29dc2060b11b703b16663b88612245dc2a6e648a669307246854db15" gracePeriod=2
Jan 26 10:58:46 crc kubenswrapper[5003]: I0126 10:58:46.029544 5003 generic.go:334] "Generic (PLEG): container finished" podID="9459bb8b-1d5f-4868-9b32-0bf3491773d1" containerID="0464d91a29dc2060b11b703b16663b88612245dc2a6e648a669307246854db15" exitCode=0
Jan 26 10:58:46 crc kubenswrapper[5003]: I0126 10:58:46.029586 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8x8v6" event={"ID":"9459bb8b-1d5f-4868-9b32-0bf3491773d1","Type":"ContainerDied","Data":"0464d91a29dc2060b11b703b16663b88612245dc2a6e648a669307246854db15"}
Jan 26 10:58:47 crc kubenswrapper[5003]: I0126 10:58:47.036855 5003 generic.go:334] "Generic (PLEG): container finished" podID="a2e56acc-9065-4b0f-8b45-2549a989e156" containerID="4aa55387dfae1dc4652435b358f9ace601da1fed59baf76cf838f8caf1990263" exitCode=0
Jan 26 10:58:47 crc kubenswrapper[5003]: I0126 10:58:47.036921 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-thsgm" event={"ID":"a2e56acc-9065-4b0f-8b45-2549a989e156","Type":"ContainerDied","Data":"4aa55387dfae1dc4652435b358f9ace601da1fed59baf76cf838f8caf1990263"}
Jan 26 10:58:47 crc kubenswrapper[5003]: I0126 10:58:47.462335 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-thsgm"
Jan 26 10:58:47 crc kubenswrapper[5003]: I0126 10:58:47.467379 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8x8v6"
Jan 26 10:58:47 crc kubenswrapper[5003]: I0126 10:58:47.535718 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gvmtv\" (UniqueName: \"kubernetes.io/projected/9459bb8b-1d5f-4868-9b32-0bf3491773d1-kube-api-access-gvmtv\") pod \"9459bb8b-1d5f-4868-9b32-0bf3491773d1\" (UID: \"9459bb8b-1d5f-4868-9b32-0bf3491773d1\") "
Jan 26 10:58:47 crc kubenswrapper[5003]: I0126 10:58:47.535782 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2e56acc-9065-4b0f-8b45-2549a989e156-catalog-content\") pod \"a2e56acc-9065-4b0f-8b45-2549a989e156\" (UID: \"a2e56acc-9065-4b0f-8b45-2549a989e156\") "
Jan 26 10:58:47 crc kubenswrapper[5003]: I0126 10:58:47.535814 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9459bb8b-1d5f-4868-9b32-0bf3491773d1-catalog-content\") pod \"9459bb8b-1d5f-4868-9b32-0bf3491773d1\" (UID: \"9459bb8b-1d5f-4868-9b32-0bf3491773d1\") "
Jan 26 10:58:47 crc kubenswrapper[5003]: I0126 10:58:47.535848 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2e56acc-9065-4b0f-8b45-2549a989e156-utilities\") pod \"a2e56acc-9065-4b0f-8b45-2549a989e156\" (UID: \"a2e56acc-9065-4b0f-8b45-2549a989e156\") "
Jan 26 10:58:47 crc kubenswrapper[5003]: I0126 10:58:47.535947 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9459bb8b-1d5f-4868-9b32-0bf3491773d1-utilities\") pod \"9459bb8b-1d5f-4868-9b32-0bf3491773d1\" (UID: \"9459bb8b-1d5f-4868-9b32-0bf3491773d1\") "
Jan 26 10:58:47 crc kubenswrapper[5003]: I0126 10:58:47.535980 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9hgxc\" (UniqueName: \"kubernetes.io/projected/a2e56acc-9065-4b0f-8b45-2549a989e156-kube-api-access-9hgxc\") pod \"a2e56acc-9065-4b0f-8b45-2549a989e156\" (UID: \"a2e56acc-9065-4b0f-8b45-2549a989e156\") "
Jan 26 10:58:47 crc kubenswrapper[5003]: I0126 10:58:47.537470 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2e56acc-9065-4b0f-8b45-2549a989e156-utilities" (OuterVolumeSpecName: "utilities") pod "a2e56acc-9065-4b0f-8b45-2549a989e156" (UID: "a2e56acc-9065-4b0f-8b45-2549a989e156"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 10:58:47 crc kubenswrapper[5003]: I0126 10:58:47.537481 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9459bb8b-1d5f-4868-9b32-0bf3491773d1-utilities" (OuterVolumeSpecName: "utilities") pod "9459bb8b-1d5f-4868-9b32-0bf3491773d1" (UID: "9459bb8b-1d5f-4868-9b32-0bf3491773d1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 10:58:47 crc kubenswrapper[5003]: I0126 10:58:47.541420 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9459bb8b-1d5f-4868-9b32-0bf3491773d1-kube-api-access-gvmtv" (OuterVolumeSpecName: "kube-api-access-gvmtv") pod "9459bb8b-1d5f-4868-9b32-0bf3491773d1" (UID: "9459bb8b-1d5f-4868-9b32-0bf3491773d1"). InnerVolumeSpecName "kube-api-access-gvmtv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 10:58:47 crc kubenswrapper[5003]: I0126 10:58:47.541893 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2e56acc-9065-4b0f-8b45-2549a989e156-kube-api-access-9hgxc" (OuterVolumeSpecName: "kube-api-access-9hgxc") pod "a2e56acc-9065-4b0f-8b45-2549a989e156" (UID: "a2e56acc-9065-4b0f-8b45-2549a989e156"). InnerVolumeSpecName "kube-api-access-9hgxc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 10:58:47 crc kubenswrapper[5003]: I0126 10:58:47.587596 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2e56acc-9065-4b0f-8b45-2549a989e156-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a2e56acc-9065-4b0f-8b45-2549a989e156" (UID: "a2e56acc-9065-4b0f-8b45-2549a989e156"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 10:58:47 crc kubenswrapper[5003]: I0126 10:58:47.637668 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9459bb8b-1d5f-4868-9b32-0bf3491773d1-utilities\") on node \"crc\" DevicePath \"\""
Jan 26 10:58:47 crc kubenswrapper[5003]: I0126 10:58:47.637954 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9hgxc\" (UniqueName: \"kubernetes.io/projected/a2e56acc-9065-4b0f-8b45-2549a989e156-kube-api-access-9hgxc\") on node \"crc\" DevicePath \"\""
Jan 26 10:58:47 crc kubenswrapper[5003]: I0126 10:58:47.638301 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gvmtv\" (UniqueName: \"kubernetes.io/projected/9459bb8b-1d5f-4868-9b32-0bf3491773d1-kube-api-access-gvmtv\") on node \"crc\" DevicePath \"\""
Jan 26 10:58:47 crc kubenswrapper[5003]: I0126 10:58:47.638406 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2e56acc-9065-4b0f-8b45-2549a989e156-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 26 10:58:47 crc kubenswrapper[5003]: I0126 10:58:47.638503 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2e56acc-9065-4b0f-8b45-2549a989e156-utilities\") on node \"crc\" DevicePath \"\""
Jan 26 10:58:47 crc kubenswrapper[5003]: I0126 10:58:47.655386 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9459bb8b-1d5f-4868-9b32-0bf3491773d1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9459bb8b-1d5f-4868-9b32-0bf3491773d1" (UID: "9459bb8b-1d5f-4868-9b32-0bf3491773d1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 10:58:47 crc kubenswrapper[5003]: I0126 10:58:47.740242 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9459bb8b-1d5f-4868-9b32-0bf3491773d1-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 26 10:58:48 crc kubenswrapper[5003]: I0126 10:58:48.044965 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-thsgm" event={"ID":"a2e56acc-9065-4b0f-8b45-2549a989e156","Type":"ContainerDied","Data":"b22c2dd4764e12e5ef43b9ab8208f4872f3c2401acad42f2ed6b1b36cb9d7d06"}
Jan 26 10:58:48 crc kubenswrapper[5003]: I0126 10:58:48.045023 5003 scope.go:117] "RemoveContainer" containerID="4aa55387dfae1dc4652435b358f9ace601da1fed59baf76cf838f8caf1990263"
Jan 26 10:58:48 crc kubenswrapper[5003]: I0126 10:58:48.045166 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-thsgm"
Jan 26 10:58:48 crc kubenswrapper[5003]: I0126 10:58:48.055485 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8x8v6" event={"ID":"9459bb8b-1d5f-4868-9b32-0bf3491773d1","Type":"ContainerDied","Data":"f4e6cf7c59dcf218859e7c5cec06eb2be76d768df2574f4edf8b4fc076b092ce"}
Jan 26 10:58:48 crc kubenswrapper[5003]: I0126 10:58:48.055581 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8x8v6"
Jan 26 10:58:48 crc kubenswrapper[5003]: I0126 10:58:48.076445 5003 scope.go:117] "RemoveContainer" containerID="1ebf9b118c81c2587db07df17bcf0c483bf296e3137d12cd729252f449f66b98"
Jan 26 10:58:48 crc kubenswrapper[5003]: I0126 10:58:48.106479 5003 scope.go:117] "RemoveContainer" containerID="378ace15b9f1315d19662950ab0b323b0dd3a36b8a40f10e3010709696067855"
Jan 26 10:58:48 crc kubenswrapper[5003]: I0126 10:58:48.116748 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-thsgm"]
Jan 26 10:58:48 crc kubenswrapper[5003]: I0126 10:58:48.130034 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-thsgm"]
Jan 26 10:58:48 crc kubenswrapper[5003]: I0126 10:58:48.134524 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8x8v6"]
Jan 26 10:58:48 crc kubenswrapper[5003]: I0126 10:58:48.138209 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-8x8v6"]
Jan 26 10:58:48 crc kubenswrapper[5003]: I0126 10:58:48.143107 5003 scope.go:117] "RemoveContainer" containerID="0464d91a29dc2060b11b703b16663b88612245dc2a6e648a669307246854db15"
Jan 26 10:58:48 crc kubenswrapper[5003]: I0126 10:58:48.159222 5003 scope.go:117] "RemoveContainer" containerID="16c3353638600fb38cbbd2aa70e2bcd9bd3547f34f714407cec7e91e6cf3a78f"
Jan 26 10:58:48 crc kubenswrapper[5003]: I0126 10:58:48.176890 5003 scope.go:117] "RemoveContainer" containerID="e2ec5f822485ac209d93a4a6dd3ad734bf6b18fde459ed1a8c3a7f2bf181d05a"
Jan 26 10:58:48 crc kubenswrapper[5003]: I0126 10:58:48.413963 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-57zgr"
Jan 26 10:58:48 crc kubenswrapper[5003]: I0126 10:58:48.414319 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-57zgr"
Jan 26 10:58:48 crc kubenswrapper[5003]: I0126 10:58:48.462898 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-57zgr"
Jan 26 10:58:49 crc kubenswrapper[5003]: I0126 10:58:49.012147 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9459bb8b-1d5f-4868-9b32-0bf3491773d1" path="/var/lib/kubelet/pods/9459bb8b-1d5f-4868-9b32-0bf3491773d1/volumes"
Jan 26 10:58:49 crc kubenswrapper[5003]: I0126 10:58:49.012817 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2e56acc-9065-4b0f-8b45-2549a989e156" path="/var/lib/kubelet/pods/a2e56acc-9065-4b0f-8b45-2549a989e156/volumes"
Jan 26 10:58:49 crc kubenswrapper[5003]: I0126 10:58:49.062697 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-55585fc49f-hpp2z" event={"ID":"9587e58d-66ff-4a24-8373-58c7d6946575","Type":"ContainerStarted","Data":"9da371554c48247932aa231a19092402dbe8717483c4944772e390d2198f6350"}
Jan 26 10:58:49 crc kubenswrapper[5003]: I0126 10:58:49.062890 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-55585fc49f-hpp2z"
Jan 26 10:58:49 crc kubenswrapper[5003]: I0126 10:58:49.084734 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-55585fc49f-hpp2z" podStartSLOduration=4.676235022 podStartE2EDuration="13.084714853s" podCreationTimestamp="2026-01-26 10:58:36 +0000 UTC" firstStartedPulling="2026-01-26 10:58:39.596392477 +0000 UTC m=+935.137618038" lastFinishedPulling="2026-01-26 10:58:48.004872308 +0000 UTC m=+943.546097869" observedRunningTime="2026-01-26 10:58:49.078768963 +0000 UTC m=+944.619994544" watchObservedRunningTime="2026-01-26 10:58:49.084714853 +0000 UTC m=+944.625940404"
Jan 26 10:58:49 crc kubenswrapper[5003]: I0126 10:58:49.111004 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-57zgr"
Jan 26 10:58:49 crc kubenswrapper[5003]: I0126 10:58:49.706446 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/rabbitmq-server-0"
Jan 26 10:58:56 crc kubenswrapper[5003]: I0126 10:58:56.610250 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-55585fc49f-hpp2z"
Jan 26 10:58:57 crc kubenswrapper[5003]: I0126 10:58:57.080494 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-57zgr"]
Jan 26 10:58:57 crc kubenswrapper[5003]: I0126 10:58:57.080720 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-57zgr" podUID="c1a64c88-6efd-429e-af0c-989e00daf4fe" containerName="registry-server" containerID="cri-o://eb1a58b5a9feb9990b03c7ddbe52ff7eebee1b3fcead258fa18b15a849cf53ee" gracePeriod=2
Jan 26 10:58:57 crc kubenswrapper[5003]: I0126 10:58:57.976878 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-57zgr"
Jan 26 10:58:58 crc kubenswrapper[5003]: I0126 10:58:58.077563 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1a64c88-6efd-429e-af0c-989e00daf4fe-catalog-content\") pod \"c1a64c88-6efd-429e-af0c-989e00daf4fe\" (UID: \"c1a64c88-6efd-429e-af0c-989e00daf4fe\") "
Jan 26 10:58:58 crc kubenswrapper[5003]: I0126 10:58:58.077625 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brkw7\" (UniqueName: \"kubernetes.io/projected/c1a64c88-6efd-429e-af0c-989e00daf4fe-kube-api-access-brkw7\") pod \"c1a64c88-6efd-429e-af0c-989e00daf4fe\" (UID: \"c1a64c88-6efd-429e-af0c-989e00daf4fe\") "
Jan 26 10:58:58 crc kubenswrapper[5003]: I0126 10:58:58.077669 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1a64c88-6efd-429e-af0c-989e00daf4fe-utilities\") pod \"c1a64c88-6efd-429e-af0c-989e00daf4fe\" (UID: \"c1a64c88-6efd-429e-af0c-989e00daf4fe\") "
Jan 26 10:58:58 crc kubenswrapper[5003]: I0126 10:58:58.079069 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1a64c88-6efd-429e-af0c-989e00daf4fe-utilities" (OuterVolumeSpecName: "utilities") pod "c1a64c88-6efd-429e-af0c-989e00daf4fe" (UID: "c1a64c88-6efd-429e-af0c-989e00daf4fe"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 10:58:58 crc kubenswrapper[5003]: I0126 10:58:58.083633 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1a64c88-6efd-429e-af0c-989e00daf4fe-kube-api-access-brkw7" (OuterVolumeSpecName: "kube-api-access-brkw7") pod "c1a64c88-6efd-429e-af0c-989e00daf4fe" (UID: "c1a64c88-6efd-429e-af0c-989e00daf4fe"). InnerVolumeSpecName "kube-api-access-brkw7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 10:58:58 crc kubenswrapper[5003]: I0126 10:58:58.104026 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1a64c88-6efd-429e-af0c-989e00daf4fe-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c1a64c88-6efd-429e-af0c-989e00daf4fe" (UID: "c1a64c88-6efd-429e-af0c-989e00daf4fe"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 10:58:58 crc kubenswrapper[5003]: I0126 10:58:58.122511 5003 generic.go:334] "Generic (PLEG): container finished" podID="c1a64c88-6efd-429e-af0c-989e00daf4fe" containerID="eb1a58b5a9feb9990b03c7ddbe52ff7eebee1b3fcead258fa18b15a849cf53ee" exitCode=0
Jan 26 10:58:58 crc kubenswrapper[5003]: I0126 10:58:58.122564 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-57zgr" event={"ID":"c1a64c88-6efd-429e-af0c-989e00daf4fe","Type":"ContainerDied","Data":"eb1a58b5a9feb9990b03c7ddbe52ff7eebee1b3fcead258fa18b15a849cf53ee"}
Jan 26 10:58:58 crc kubenswrapper[5003]: I0126 10:58:58.122578 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-57zgr"
Jan 26 10:58:58 crc kubenswrapper[5003]: I0126 10:58:58.122597 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-57zgr" event={"ID":"c1a64c88-6efd-429e-af0c-989e00daf4fe","Type":"ContainerDied","Data":"357cc79d0b903c1a124f51e374547597626b83f4a5e5dd996932dc6f7a529e89"}
Jan 26 10:58:58 crc kubenswrapper[5003]: I0126 10:58:58.122618 5003 scope.go:117] "RemoveContainer" containerID="eb1a58b5a9feb9990b03c7ddbe52ff7eebee1b3fcead258fa18b15a849cf53ee"
Jan 26 10:58:58 crc kubenswrapper[5003]: I0126 10:58:58.156507 5003 scope.go:117] "RemoveContainer" containerID="9c3e591638344764d1bc1c86c4559c12683b5c7301674976227fe0912b18276f"
Jan 26 10:58:58 crc kubenswrapper[5003]: I0126 10:58:58.162361 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-57zgr"]
Jan 26 10:58:58 crc kubenswrapper[5003]: I0126 10:58:58.168095 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-57zgr"]
Jan 26 10:58:58 crc kubenswrapper[5003]: I0126 10:58:58.179320 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1a64c88-6efd-429e-af0c-989e00daf4fe-utilities\") on node \"crc\" DevicePath \"\""
Jan 26 10:58:58 crc kubenswrapper[5003]: I0126 10:58:58.179351 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1a64c88-6efd-429e-af0c-989e00daf4fe-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 26 10:58:58 crc kubenswrapper[5003]: I0126 10:58:58.179363 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brkw7\" (UniqueName: \"kubernetes.io/projected/c1a64c88-6efd-429e-af0c-989e00daf4fe-kube-api-access-brkw7\") on node \"crc\" DevicePath \"\""
Jan 26 10:58:58 crc kubenswrapper[5003]: I0126 10:58:58.180001 5003 scope.go:117] "RemoveContainer" containerID="756fa6f6382d4f591ff855afbd279febab4f28b3f166859c6b4a068879b71185"
Jan 26 10:58:58 crc kubenswrapper[5003]: I0126 10:58:58.206658 5003 scope.go:117] "RemoveContainer" containerID="eb1a58b5a9feb9990b03c7ddbe52ff7eebee1b3fcead258fa18b15a849cf53ee"
Jan 26 10:58:58 crc kubenswrapper[5003]: E0126 10:58:58.207133 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb1a58b5a9feb9990b03c7ddbe52ff7eebee1b3fcead258fa18b15a849cf53ee\": container with ID starting with eb1a58b5a9feb9990b03c7ddbe52ff7eebee1b3fcead258fa18b15a849cf53ee not found: ID does not exist" containerID="eb1a58b5a9feb9990b03c7ddbe52ff7eebee1b3fcead258fa18b15a849cf53ee"
Jan 26 10:58:58 crc kubenswrapper[5003]: I0126 10:58:58.207188 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb1a58b5a9feb9990b03c7ddbe52ff7eebee1b3fcead258fa18b15a849cf53ee"} err="failed to get container status \"eb1a58b5a9feb9990b03c7ddbe52ff7eebee1b3fcead258fa18b15a849cf53ee\": rpc error: code = NotFound desc = could not find container \"eb1a58b5a9feb9990b03c7ddbe52ff7eebee1b3fcead258fa18b15a849cf53ee\": container with ID starting with eb1a58b5a9feb9990b03c7ddbe52ff7eebee1b3fcead258fa18b15a849cf53ee not found: ID does not exist"
Jan 26 10:58:58 crc kubenswrapper[5003]: I0126 10:58:58.207218 5003 scope.go:117] "RemoveContainer" containerID="9c3e591638344764d1bc1c86c4559c12683b5c7301674976227fe0912b18276f"
Jan 26 10:58:58 crc kubenswrapper[5003]: E0126 10:58:58.208218 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c3e591638344764d1bc1c86c4559c12683b5c7301674976227fe0912b18276f\": container with ID starting with 9c3e591638344764d1bc1c86c4559c12683b5c7301674976227fe0912b18276f not found: ID does not exist" containerID="9c3e591638344764d1bc1c86c4559c12683b5c7301674976227fe0912b18276f"
Jan 26 10:58:58 crc kubenswrapper[5003]: I0126 10:58:58.208271 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c3e591638344764d1bc1c86c4559c12683b5c7301674976227fe0912b18276f"} err="failed to get container status \"9c3e591638344764d1bc1c86c4559c12683b5c7301674976227fe0912b18276f\": rpc error: code = NotFound desc = could not find container \"9c3e591638344764d1bc1c86c4559c12683b5c7301674976227fe0912b18276f\": container with ID starting with 9c3e591638344764d1bc1c86c4559c12683b5c7301674976227fe0912b18276f not found: ID does not exist"
Jan 26 10:58:58 crc kubenswrapper[5003]: I0126 10:58:58.208317 5003 scope.go:117] "RemoveContainer" containerID="756fa6f6382d4f591ff855afbd279febab4f28b3f166859c6b4a068879b71185"
Jan 26 10:58:58 crc kubenswrapper[5003]: E0126 10:58:58.208682 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"756fa6f6382d4f591ff855afbd279febab4f28b3f166859c6b4a068879b71185\": container with ID starting with 756fa6f6382d4f591ff855afbd279febab4f28b3f166859c6b4a068879b71185 not found: ID does not exist" containerID="756fa6f6382d4f591ff855afbd279febab4f28b3f166859c6b4a068879b71185"
Jan 26 10:58:58 crc kubenswrapper[5003]: I0126 10:58:58.208888 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"756fa6f6382d4f591ff855afbd279febab4f28b3f166859c6b4a068879b71185"} err="failed to get container status \"756fa6f6382d4f591ff855afbd279febab4f28b3f166859c6b4a068879b71185\": rpc error: code = NotFound desc = could not find container \"756fa6f6382d4f591ff855afbd279febab4f28b3f166859c6b4a068879b71185\": container with ID starting with 756fa6f6382d4f591ff855afbd279febab4f28b3f166859c6b4a068879b71185 not found: ID does not exist"
Jan 26 10:58:59 crc kubenswrapper[5003]: I0126 10:58:59.007766 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1a64c88-6efd-429e-af0c-989e00daf4fe" path="/var/lib/kubelet/pods/c1a64c88-6efd-429e-af0c-989e00daf4fe/volumes"
Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.015696 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/keystone-db-create-chmzt"]
Jan 26 10:59:00 crc kubenswrapper[5003]: E0126 10:59:00.016356 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1a64c88-6efd-429e-af0c-989e00daf4fe" containerName="extract-utilities"
Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.016373 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1a64c88-6efd-429e-af0c-989e00daf4fe" containerName="extract-utilities"
Jan 26 10:59:00 crc kubenswrapper[5003]: E0126 10:59:00.016393 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2e56acc-9065-4b0f-8b45-2549a989e156" containerName="registry-server"
Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.016402 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2e56acc-9065-4b0f-8b45-2549a989e156" containerName="registry-server"
Jan 26 10:59:00 crc kubenswrapper[5003]: E0126 10:59:00.016412 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9459bb8b-1d5f-4868-9b32-0bf3491773d1" containerName="extract-utilities"
Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.016419 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="9459bb8b-1d5f-4868-9b32-0bf3491773d1" containerName="extract-utilities"
Jan 26 10:59:00 crc kubenswrapper[5003]: E0126 10:59:00.016439 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9459bb8b-1d5f-4868-9b32-0bf3491773d1" containerName="extract-content"
Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.016446 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="9459bb8b-1d5f-4868-9b32-0bf3491773d1" containerName="extract-content"
Jan 26 10:59:00 crc kubenswrapper[5003]: E0126 10:59:00.016453 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2e56acc-9065-4b0f-8b45-2549a989e156" containerName="extract-content"
Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.016460 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2e56acc-9065-4b0f-8b45-2549a989e156" containerName="extract-content"
Jan 26 10:59:00 crc kubenswrapper[5003]: E0126 10:59:00.016469 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9459bb8b-1d5f-4868-9b32-0bf3491773d1" containerName="registry-server"
Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.016475 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="9459bb8b-1d5f-4868-9b32-0bf3491773d1" containerName="registry-server"
Jan 26 10:59:00 crc kubenswrapper[5003]: E0126 10:59:00.016487 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1a64c88-6efd-429e-af0c-989e00daf4fe" containerName="registry-server"
Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.016494 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1a64c88-6efd-429e-af0c-989e00daf4fe" containerName="registry-server"
Jan 26 10:59:00 crc kubenswrapper[5003]: E0126 10:59:00.016504 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1a64c88-6efd-429e-af0c-989e00daf4fe" containerName="extract-content"
Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.016513 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1a64c88-6efd-429e-af0c-989e00daf4fe" containerName="extract-content"
Jan 26 10:59:00 crc kubenswrapper[5003]: E0126 10:59:00.016527 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2e56acc-9065-4b0f-8b45-2549a989e156" containerName="extract-utilities"
Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.016535 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2e56acc-9065-4b0f-8b45-2549a989e156" containerName="extract-utilities"
Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.016674 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2e56acc-9065-4b0f-8b45-2549a989e156" containerName="registry-server"
Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.016695 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="9459bb8b-1d5f-4868-9b32-0bf3491773d1" containerName="registry-server"
Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.016707 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1a64c88-6efd-429e-af0c-989e00daf4fe" containerName="registry-server"
Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.017251 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone-db-create-chmzt"
Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.020294 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/keystone-2760-account-create-update-c4bzl"]
Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.021291 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone-2760-account-create-update-c4bzl"
Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.023810 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone-db-create-chmzt"]
Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.026713 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone-db-secret"
Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.038954 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone-2760-account-create-update-c4bzl"]
Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.104060 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9dcrq\" (UniqueName: \"kubernetes.io/projected/e641b453-4123-45f4-8693-5cabd87d872b-kube-api-access-9dcrq\") pod \"keystone-db-create-chmzt\" (UID: \"e641b453-4123-45f4-8693-5cabd87d872b\") " pod="swift-kuttl-tests/keystone-db-create-chmzt"
Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.104167 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e641b453-4123-45f4-8693-5cabd87d872b-operator-scripts\") pod \"keystone-db-create-chmzt\" (UID: \"e641b453-4123-45f4-8693-5cabd87d872b\") " pod="swift-kuttl-tests/keystone-db-create-chmzt"
Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.104241 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8rsk\" (UniqueName: \"kubernetes.io/projected/524e27d6-7ca0-4387-a4ad-f32cbe1121c0-kube-api-access-p8rsk\") pod \"keystone-2760-account-create-update-c4bzl\" (UID: \"524e27d6-7ca0-4387-a4ad-f32cbe1121c0\") " pod="swift-kuttl-tests/keystone-2760-account-create-update-c4bzl"
Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.104265 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/524e27d6-7ca0-4387-a4ad-f32cbe1121c0-operator-scripts\") pod \"keystone-2760-account-create-update-c4bzl\" (UID: \"524e27d6-7ca0-4387-a4ad-f32cbe1121c0\") " pod="swift-kuttl-tests/keystone-2760-account-create-update-c4bzl"
Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.205239 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8rsk\" (UniqueName: \"kubernetes.io/projected/524e27d6-7ca0-4387-a4ad-f32cbe1121c0-kube-api-access-p8rsk\") pod \"keystone-2760-account-create-update-c4bzl\" (UID: \"524e27d6-7ca0-4387-a4ad-f32cbe1121c0\") " pod="swift-kuttl-tests/keystone-2760-account-create-update-c4bzl"
Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.205523 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/524e27d6-7ca0-4387-a4ad-f32cbe1121c0-operator-scripts\") pod \"keystone-2760-account-create-update-c4bzl\" (UID: \"524e27d6-7ca0-4387-a4ad-f32cbe1121c0\") " 
pod="swift-kuttl-tests/keystone-2760-account-create-update-c4bzl" Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.205611 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9dcrq\" (UniqueName: \"kubernetes.io/projected/e641b453-4123-45f4-8693-5cabd87d872b-kube-api-access-9dcrq\") pod \"keystone-db-create-chmzt\" (UID: \"e641b453-4123-45f4-8693-5cabd87d872b\") " pod="swift-kuttl-tests/keystone-db-create-chmzt" Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.205720 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e641b453-4123-45f4-8693-5cabd87d872b-operator-scripts\") pod \"keystone-db-create-chmzt\" (UID: \"e641b453-4123-45f4-8693-5cabd87d872b\") " pod="swift-kuttl-tests/keystone-db-create-chmzt" Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.206291 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/524e27d6-7ca0-4387-a4ad-f32cbe1121c0-operator-scripts\") pod \"keystone-2760-account-create-update-c4bzl\" (UID: \"524e27d6-7ca0-4387-a4ad-f32cbe1121c0\") " pod="swift-kuttl-tests/keystone-2760-account-create-update-c4bzl" Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.206485 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e641b453-4123-45f4-8693-5cabd87d872b-operator-scripts\") pod \"keystone-db-create-chmzt\" (UID: \"e641b453-4123-45f4-8693-5cabd87d872b\") " pod="swift-kuttl-tests/keystone-db-create-chmzt" Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.230153 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p8rsk\" (UniqueName: \"kubernetes.io/projected/524e27d6-7ca0-4387-a4ad-f32cbe1121c0-kube-api-access-p8rsk\") pod \"keystone-2760-account-create-update-c4bzl\" (UID: \"524e27d6-7ca0-4387-a4ad-f32cbe1121c0\") " pod="swift-kuttl-tests/keystone-2760-account-create-update-c4bzl" Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.243928 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9dcrq\" (UniqueName: \"kubernetes.io/projected/e641b453-4123-45f4-8693-5cabd87d872b-kube-api-access-9dcrq\") pod \"keystone-db-create-chmzt\" (UID: \"e641b453-4123-45f4-8693-5cabd87d872b\") " pod="swift-kuttl-tests/keystone-db-create-chmzt" Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.335251 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone-db-create-chmzt" Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.351025 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/keystone-2760-account-create-update-c4bzl" Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.753238 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone-db-create-chmzt"] Jan 26 10:59:00 crc kubenswrapper[5003]: I0126 10:59:00.860135 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone-2760-account-create-update-c4bzl"] Jan 26 10:59:00 crc kubenswrapper[5003]: W0126 10:59:00.862348 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod524e27d6_7ca0_4387_a4ad_f32cbe1121c0.slice/crio-718b158dc54e9ca7c1b12d5a3e6bd1c05b86cc698170d9c4cf378656223a4b6e WatchSource:0}: Error finding container 718b158dc54e9ca7c1b12d5a3e6bd1c05b86cc698170d9c4cf378656223a4b6e: Status 404 returned error can't find the container with id 718b158dc54e9ca7c1b12d5a3e6bd1c05b86cc698170d9c4cf378656223a4b6e Jan 26 10:59:01 crc kubenswrapper[5003]: I0126 10:59:01.155224 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-2760-account-create-update-c4bzl" event={"ID":"524e27d6-7ca0-4387-a4ad-f32cbe1121c0","Type":"ContainerStarted","Data":"718b158dc54e9ca7c1b12d5a3e6bd1c05b86cc698170d9c4cf378656223a4b6e"} Jan 26 10:59:01 crc kubenswrapper[5003]: I0126 10:59:01.156253 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-db-create-chmzt" event={"ID":"e641b453-4123-45f4-8693-5cabd87d872b","Type":"ContainerStarted","Data":"cd547a9c10456ed8d01bd3c02021220e9391c0e39035774e28aa25e58e890bc1"} Jan 26 10:59:02 crc kubenswrapper[5003]: I0126 10:59:02.164713 5003 generic.go:334] "Generic (PLEG): container finished" podID="e641b453-4123-45f4-8693-5cabd87d872b" containerID="6f3f3e65776fe839fbe7aee66407550a1bf403f49a368da1a168ddec2611e879" exitCode=0 Jan 26 10:59:02 crc kubenswrapper[5003]: I0126 10:59:02.164874 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-db-create-chmzt" event={"ID":"e641b453-4123-45f4-8693-5cabd87d872b","Type":"ContainerDied","Data":"6f3f3e65776fe839fbe7aee66407550a1bf403f49a368da1a168ddec2611e879"} Jan 26 10:59:02 crc kubenswrapper[5003]: I0126 10:59:02.166582 5003 generic.go:334] "Generic (PLEG): container finished" podID="524e27d6-7ca0-4387-a4ad-f32cbe1121c0" containerID="1bc756d14e4ef278dacef0892b46f5880a308907e1c2740d1ada43cb814c55b2" exitCode=0 Jan 26 10:59:02 crc kubenswrapper[5003]: I0126 10:59:02.166629 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-2760-account-create-update-c4bzl" event={"ID":"524e27d6-7ca0-4387-a4ad-f32cbe1121c0","Type":"ContainerDied","Data":"1bc756d14e4ef278dacef0892b46f5880a308907e1c2740d1ada43cb814c55b2"} Jan 26 10:59:03 crc kubenswrapper[5003]: I0126 10:59:03.510495 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone-2760-account-create-update-c4bzl" Jan 26 10:59:03 crc kubenswrapper[5003]: I0126 10:59:03.518964 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/keystone-db-create-chmzt" Jan 26 10:59:03 crc kubenswrapper[5003]: I0126 10:59:03.649466 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e641b453-4123-45f4-8693-5cabd87d872b-operator-scripts\") pod \"e641b453-4123-45f4-8693-5cabd87d872b\" (UID: \"e641b453-4123-45f4-8693-5cabd87d872b\") " Jan 26 10:59:03 crc kubenswrapper[5003]: I0126 10:59:03.649637 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/524e27d6-7ca0-4387-a4ad-f32cbe1121c0-operator-scripts\") pod \"524e27d6-7ca0-4387-a4ad-f32cbe1121c0\" (UID: \"524e27d6-7ca0-4387-a4ad-f32cbe1121c0\") " Jan 26 10:59:03 crc kubenswrapper[5003]: I0126 10:59:03.650314 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e641b453-4123-45f4-8693-5cabd87d872b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e641b453-4123-45f4-8693-5cabd87d872b" (UID: "e641b453-4123-45f4-8693-5cabd87d872b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:59:03 crc kubenswrapper[5003]: I0126 10:59:03.649675 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p8rsk\" (UniqueName: \"kubernetes.io/projected/524e27d6-7ca0-4387-a4ad-f32cbe1121c0-kube-api-access-p8rsk\") pod \"524e27d6-7ca0-4387-a4ad-f32cbe1121c0\" (UID: \"524e27d6-7ca0-4387-a4ad-f32cbe1121c0\") " Jan 26 10:59:03 crc kubenswrapper[5003]: I0126 10:59:03.650488 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9dcrq\" (UniqueName: \"kubernetes.io/projected/e641b453-4123-45f4-8693-5cabd87d872b-kube-api-access-9dcrq\") pod \"e641b453-4123-45f4-8693-5cabd87d872b\" (UID: \"e641b453-4123-45f4-8693-5cabd87d872b\") " Jan 26 10:59:03 crc kubenswrapper[5003]: I0126 10:59:03.650612 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/524e27d6-7ca0-4387-a4ad-f32cbe1121c0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "524e27d6-7ca0-4387-a4ad-f32cbe1121c0" (UID: "524e27d6-7ca0-4387-a4ad-f32cbe1121c0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:59:03 crc kubenswrapper[5003]: I0126 10:59:03.650962 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/524e27d6-7ca0-4387-a4ad-f32cbe1121c0-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 26 10:59:03 crc kubenswrapper[5003]: I0126 10:59:03.650979 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e641b453-4123-45f4-8693-5cabd87d872b-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 26 10:59:03 crc kubenswrapper[5003]: I0126 10:59:03.654850 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/524e27d6-7ca0-4387-a4ad-f32cbe1121c0-kube-api-access-p8rsk" (OuterVolumeSpecName: "kube-api-access-p8rsk") pod "524e27d6-7ca0-4387-a4ad-f32cbe1121c0" (UID: "524e27d6-7ca0-4387-a4ad-f32cbe1121c0"). InnerVolumeSpecName "kube-api-access-p8rsk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:59:03 crc kubenswrapper[5003]: I0126 10:59:03.655411 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e641b453-4123-45f4-8693-5cabd87d872b-kube-api-access-9dcrq" (OuterVolumeSpecName: "kube-api-access-9dcrq") pod "e641b453-4123-45f4-8693-5cabd87d872b" (UID: "e641b453-4123-45f4-8693-5cabd87d872b"). InnerVolumeSpecName "kube-api-access-9dcrq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:59:03 crc kubenswrapper[5003]: I0126 10:59:03.752312 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p8rsk\" (UniqueName: \"kubernetes.io/projected/524e27d6-7ca0-4387-a4ad-f32cbe1121c0-kube-api-access-p8rsk\") on node \"crc\" DevicePath \"\"" Jan 26 10:59:03 crc kubenswrapper[5003]: I0126 10:59:03.752350 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9dcrq\" (UniqueName: \"kubernetes.io/projected/e641b453-4123-45f4-8693-5cabd87d872b-kube-api-access-9dcrq\") on node \"crc\" DevicePath \"\"" Jan 26 10:59:04 crc kubenswrapper[5003]: I0126 10:59:04.179416 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-db-create-chmzt" event={"ID":"e641b453-4123-45f4-8693-5cabd87d872b","Type":"ContainerDied","Data":"cd547a9c10456ed8d01bd3c02021220e9391c0e39035774e28aa25e58e890bc1"} Jan 26 10:59:04 crc kubenswrapper[5003]: I0126 10:59:04.179457 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cd547a9c10456ed8d01bd3c02021220e9391c0e39035774e28aa25e58e890bc1" Jan 26 10:59:04 crc kubenswrapper[5003]: I0126 10:59:04.179455 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone-db-create-chmzt" Jan 26 10:59:04 crc kubenswrapper[5003]: I0126 10:59:04.180329 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-2760-account-create-update-c4bzl" event={"ID":"524e27d6-7ca0-4387-a4ad-f32cbe1121c0","Type":"ContainerDied","Data":"718b158dc54e9ca7c1b12d5a3e6bd1c05b86cc698170d9c4cf378656223a4b6e"} Jan 26 10:59:04 crc kubenswrapper[5003]: I0126 10:59:04.180352 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="718b158dc54e9ca7c1b12d5a3e6bd1c05b86cc698170d9c4cf378656223a4b6e" Jan 26 10:59:04 crc kubenswrapper[5003]: I0126 10:59:04.180399 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/keystone-2760-account-create-update-c4bzl" Jan 26 10:59:04 crc kubenswrapper[5003]: I0126 10:59:04.693374 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-index-r8mc9"] Jan 26 10:59:04 crc kubenswrapper[5003]: E0126 10:59:04.693657 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="524e27d6-7ca0-4387-a4ad-f32cbe1121c0" containerName="mariadb-account-create-update" Jan 26 10:59:04 crc kubenswrapper[5003]: I0126 10:59:04.693671 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="524e27d6-7ca0-4387-a4ad-f32cbe1121c0" containerName="mariadb-account-create-update" Jan 26 10:59:04 crc kubenswrapper[5003]: E0126 10:59:04.693690 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e641b453-4123-45f4-8693-5cabd87d872b" containerName="mariadb-database-create" Jan 26 10:59:04 crc kubenswrapper[5003]: I0126 10:59:04.693709 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e641b453-4123-45f4-8693-5cabd87d872b" containerName="mariadb-database-create" Jan 26 10:59:04 crc kubenswrapper[5003]: I0126 10:59:04.693829 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e641b453-4123-45f4-8693-5cabd87d872b" containerName="mariadb-database-create" Jan 26 10:59:04 crc kubenswrapper[5003]: I0126 10:59:04.693840 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="524e27d6-7ca0-4387-a4ad-f32cbe1121c0" containerName="mariadb-account-create-update" Jan 26 10:59:04 crc kubenswrapper[5003]: I0126 10:59:04.694228 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-index-r8mc9" Jan 26 10:59:04 crc kubenswrapper[5003]: I0126 10:59:04.696472 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-index-dockercfg-4ft95" Jan 26 10:59:04 crc kubenswrapper[5003]: I0126 10:59:04.700020 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-index-r8mc9"] Jan 26 10:59:04 crc kubenswrapper[5003]: I0126 10:59:04.767445 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvz8c\" (UniqueName: \"kubernetes.io/projected/68526bd8-8a1a-478b-8b12-3333bcaf29c8-kube-api-access-kvz8c\") pod \"barbican-operator-index-r8mc9\" (UID: \"68526bd8-8a1a-478b-8b12-3333bcaf29c8\") " pod="openstack-operators/barbican-operator-index-r8mc9" Jan 26 10:59:04 crc kubenswrapper[5003]: I0126 10:59:04.868806 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvz8c\" (UniqueName: \"kubernetes.io/projected/68526bd8-8a1a-478b-8b12-3333bcaf29c8-kube-api-access-kvz8c\") pod \"barbican-operator-index-r8mc9\" (UID: \"68526bd8-8a1a-478b-8b12-3333bcaf29c8\") " pod="openstack-operators/barbican-operator-index-r8mc9" Jan 26 10:59:04 crc kubenswrapper[5003]: I0126 10:59:04.886468 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvz8c\" (UniqueName: \"kubernetes.io/projected/68526bd8-8a1a-478b-8b12-3333bcaf29c8-kube-api-access-kvz8c\") pod \"barbican-operator-index-r8mc9\" (UID: \"68526bd8-8a1a-478b-8b12-3333bcaf29c8\") " pod="openstack-operators/barbican-operator-index-r8mc9" Jan 26 10:59:05 crc kubenswrapper[5003]: I0126 10:59:05.012852 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-index-dockercfg-4ft95" Jan 26 
10:59:05 crc kubenswrapper[5003]: I0126 10:59:05.020184 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-index-r8mc9" Jan 26 10:59:05 crc kubenswrapper[5003]: I0126 10:59:05.476456 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/keystone-db-sync-r2g92"] Jan 26 10:59:05 crc kubenswrapper[5003]: I0126 10:59:05.478376 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone-db-sync-r2g92" Jan 26 10:59:05 crc kubenswrapper[5003]: I0126 10:59:05.480656 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone" Jan 26 10:59:05 crc kubenswrapper[5003]: I0126 10:59:05.481159 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone-keystone-dockercfg-jcz5p" Jan 26 10:59:05 crc kubenswrapper[5003]: I0126 10:59:05.481162 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone-scripts" Jan 26 10:59:05 crc kubenswrapper[5003]: I0126 10:59:05.481425 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone-db-sync-r2g92"] Jan 26 10:59:05 crc kubenswrapper[5003]: I0126 10:59:05.481544 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone-config-data" Jan 26 10:59:05 crc kubenswrapper[5003]: I0126 10:59:05.579975 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wnbl\" (UniqueName: \"kubernetes.io/projected/e14a5862-4c4d-4a12-b110-03285b32d28c-kube-api-access-2wnbl\") pod \"keystone-db-sync-r2g92\" (UID: \"e14a5862-4c4d-4a12-b110-03285b32d28c\") " pod="swift-kuttl-tests/keystone-db-sync-r2g92" Jan 26 10:59:05 crc kubenswrapper[5003]: I0126 10:59:05.580062 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e14a5862-4c4d-4a12-b110-03285b32d28c-config-data\") pod \"keystone-db-sync-r2g92\" (UID: \"e14a5862-4c4d-4a12-b110-03285b32d28c\") " pod="swift-kuttl-tests/keystone-db-sync-r2g92" Jan 26 10:59:05 crc kubenswrapper[5003]: I0126 10:59:05.642501 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-index-r8mc9"] Jan 26 10:59:05 crc kubenswrapper[5003]: I0126 10:59:05.651333 5003 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 26 10:59:05 crc kubenswrapper[5003]: I0126 10:59:05.681978 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wnbl\" (UniqueName: \"kubernetes.io/projected/e14a5862-4c4d-4a12-b110-03285b32d28c-kube-api-access-2wnbl\") pod \"keystone-db-sync-r2g92\" (UID: \"e14a5862-4c4d-4a12-b110-03285b32d28c\") " pod="swift-kuttl-tests/keystone-db-sync-r2g92" Jan 26 10:59:05 crc kubenswrapper[5003]: I0126 10:59:05.682193 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e14a5862-4c4d-4a12-b110-03285b32d28c-config-data\") pod \"keystone-db-sync-r2g92\" (UID: \"e14a5862-4c4d-4a12-b110-03285b32d28c\") " pod="swift-kuttl-tests/keystone-db-sync-r2g92" Jan 26 10:59:05 crc kubenswrapper[5003]: I0126 10:59:05.688412 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/e14a5862-4c4d-4a12-b110-03285b32d28c-config-data\") pod \"keystone-db-sync-r2g92\" (UID: \"e14a5862-4c4d-4a12-b110-03285b32d28c\") " pod="swift-kuttl-tests/keystone-db-sync-r2g92" Jan 26 10:59:05 crc kubenswrapper[5003]: I0126 10:59:05.702647 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wnbl\" (UniqueName: \"kubernetes.io/projected/e14a5862-4c4d-4a12-b110-03285b32d28c-kube-api-access-2wnbl\") pod \"keystone-db-sync-r2g92\" (UID: \"e14a5862-4c4d-4a12-b110-03285b32d28c\") " pod="swift-kuttl-tests/keystone-db-sync-r2g92" Jan 26 10:59:05 crc kubenswrapper[5003]: I0126 10:59:05.793449 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone-db-sync-r2g92" Jan 26 10:59:06 crc kubenswrapper[5003]: I0126 10:59:06.195312 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-index-r8mc9" event={"ID":"68526bd8-8a1a-478b-8b12-3333bcaf29c8","Type":"ContainerStarted","Data":"a7b4b45a7dba931eb3df6ce15abed7f0f4c1971d93dc90adfe9c36cd25acf9c1"} Jan 26 10:59:06 crc kubenswrapper[5003]: I0126 10:59:06.211695 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone-db-sync-r2g92"] Jan 26 10:59:06 crc kubenswrapper[5003]: W0126 10:59:06.217153 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode14a5862_4c4d_4a12_b110_03285b32d28c.slice/crio-bba555df9847c3b0500ae60224a99820f13d4567342e76b24fada87a0b71788a WatchSource:0}: Error finding container bba555df9847c3b0500ae60224a99820f13d4567342e76b24fada87a0b71788a: Status 404 returned error can't find the container with id bba555df9847c3b0500ae60224a99820f13d4567342e76b24fada87a0b71788a Jan 26 10:59:07 crc kubenswrapper[5003]: I0126 10:59:07.203101 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-db-sync-r2g92" event={"ID":"e14a5862-4c4d-4a12-b110-03285b32d28c","Type":"ContainerStarted","Data":"bba555df9847c3b0500ae60224a99820f13d4567342e76b24fada87a0b71788a"} Jan 26 10:59:07 crc kubenswrapper[5003]: I0126 10:59:07.205551 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-index-r8mc9" event={"ID":"68526bd8-8a1a-478b-8b12-3333bcaf29c8","Type":"ContainerStarted","Data":"ad7900e76bc0190c436d8833b604faecd69b1b1692016a52a835325f33513b40"} Jan 26 10:59:07 crc kubenswrapper[5003]: I0126 10:59:07.226625 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-index-r8mc9" podStartSLOduration=2.166752428 podStartE2EDuration="3.226606474s" podCreationTimestamp="2026-01-26 10:59:04 +0000 UTC" firstStartedPulling="2026-01-26 10:59:05.651096107 +0000 UTC m=+961.192321668" lastFinishedPulling="2026-01-26 10:59:06.710950153 +0000 UTC m=+962.252175714" observedRunningTime="2026-01-26 10:59:07.217832345 +0000 UTC m=+962.759057906" watchObservedRunningTime="2026-01-26 10:59:07.226606474 +0000 UTC m=+962.767832035" Jan 26 10:59:09 crc kubenswrapper[5003]: I0126 10:59:09.039919 5003 patch_prober.go:28] interesting pod/machine-config-daemon-m84kp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 10:59:09 crc kubenswrapper[5003]: I0126 10:59:09.040233 5003 prober.go:107] "Probe failed" 
probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 10:59:09 crc kubenswrapper[5003]: I0126 10:59:09.040298 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" Jan 26 10:59:09 crc kubenswrapper[5003]: I0126 10:59:09.041841 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f3eab31ad2a64d16b429c7fff6c1ada069433f73eabf4567b3026431fe989a0c"} pod="openshift-machine-config-operator/machine-config-daemon-m84kp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 26 10:59:09 crc kubenswrapper[5003]: I0126 10:59:09.042005 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" containerID="cri-o://f3eab31ad2a64d16b429c7fff6c1ada069433f73eabf4567b3026431fe989a0c" gracePeriod=600 Jan 26 10:59:10 crc kubenswrapper[5003]: I0126 10:59:10.242518 5003 generic.go:334] "Generic (PLEG): container finished" podID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerID="f3eab31ad2a64d16b429c7fff6c1ada069433f73eabf4567b3026431fe989a0c" exitCode=0 Jan 26 10:59:10 crc kubenswrapper[5003]: I0126 10:59:10.242567 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" event={"ID":"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd","Type":"ContainerDied","Data":"f3eab31ad2a64d16b429c7fff6c1ada069433f73eabf4567b3026431fe989a0c"} Jan 26 10:59:10 crc kubenswrapper[5003]: I0126 10:59:10.242603 5003 scope.go:117] "RemoveContainer" containerID="4dd55168d07d12b4dda1e126f43b86ddabeac34b8ea63b9c2a281cb6276edb9b" Jan 26 10:59:15 crc kubenswrapper[5003]: I0126 10:59:15.021013 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-index-r8mc9" Jan 26 10:59:15 crc kubenswrapper[5003]: I0126 10:59:15.021614 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/barbican-operator-index-r8mc9" Jan 26 10:59:15 crc kubenswrapper[5003]: I0126 10:59:15.049180 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/barbican-operator-index-r8mc9" Jan 26 10:59:15 crc kubenswrapper[5003]: I0126 10:59:15.276433 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" event={"ID":"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd","Type":"ContainerStarted","Data":"e95ef3044b7da7897332a1c0dc0a352de84ea5dd8273e8eb61313248ed95c0df"} Jan 26 10:59:15 crc kubenswrapper[5003]: I0126 10:59:15.280848 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-db-sync-r2g92" event={"ID":"e14a5862-4c4d-4a12-b110-03285b32d28c","Type":"ContainerStarted","Data":"ce3d943c634bc4c237bdb22ca4c56310b8e4f127467461e36d1d81f0e980aa04"} Jan 26 10:59:15 crc kubenswrapper[5003]: I0126 10:59:15.321198 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/keystone-db-sync-r2g92" podStartSLOduration=1.746659172 
podStartE2EDuration="10.321175529s" podCreationTimestamp="2026-01-26 10:59:05 +0000 UTC" firstStartedPulling="2026-01-26 10:59:06.219795959 +0000 UTC m=+961.761021510" lastFinishedPulling="2026-01-26 10:59:14.794312306 +0000 UTC m=+970.335537867" observedRunningTime="2026-01-26 10:59:15.311074699 +0000 UTC m=+970.852300270" watchObservedRunningTime="2026-01-26 10:59:15.321175529 +0000 UTC m=+970.862401090" Jan 26 10:59:15 crc kubenswrapper[5003]: I0126 10:59:15.323340 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-index-r8mc9" Jan 26 10:59:16 crc kubenswrapper[5003]: I0126 10:59:16.332990 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5"] Jan 26 10:59:16 crc kubenswrapper[5003]: I0126 10:59:16.334253 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5" Jan 26 10:59:16 crc kubenswrapper[5003]: I0126 10:59:16.336018 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-x8f6q" Jan 26 10:59:16 crc kubenswrapper[5003]: I0126 10:59:16.344545 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5"] Jan 26 10:59:16 crc kubenswrapper[5003]: I0126 10:59:16.439925 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8cedfada-2004-4908-b8eb-cbc066c92dd9-util\") pod \"70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5\" (UID: \"8cedfada-2004-4908-b8eb-cbc066c92dd9\") " pod="openstack-operators/70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5" Jan 26 10:59:16 crc kubenswrapper[5003]: I0126 10:59:16.440049 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8cedfada-2004-4908-b8eb-cbc066c92dd9-bundle\") pod \"70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5\" (UID: \"8cedfada-2004-4908-b8eb-cbc066c92dd9\") " pod="openstack-operators/70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5" Jan 26 10:59:16 crc kubenswrapper[5003]: I0126 10:59:16.440123 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98gth\" (UniqueName: \"kubernetes.io/projected/8cedfada-2004-4908-b8eb-cbc066c92dd9-kube-api-access-98gth\") pod \"70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5\" (UID: \"8cedfada-2004-4908-b8eb-cbc066c92dd9\") " pod="openstack-operators/70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5" Jan 26 10:59:16 crc kubenswrapper[5003]: I0126 10:59:16.541353 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8cedfada-2004-4908-b8eb-cbc066c92dd9-util\") pod \"70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5\" (UID: \"8cedfada-2004-4908-b8eb-cbc066c92dd9\") " pod="openstack-operators/70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5" Jan 26 10:59:16 crc kubenswrapper[5003]: I0126 10:59:16.541410 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/8cedfada-2004-4908-b8eb-cbc066c92dd9-bundle\") pod \"70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5\" (UID: \"8cedfada-2004-4908-b8eb-cbc066c92dd9\") " pod="openstack-operators/70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5" Jan 26 10:59:16 crc kubenswrapper[5003]: I0126 10:59:16.541448 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98gth\" (UniqueName: \"kubernetes.io/projected/8cedfada-2004-4908-b8eb-cbc066c92dd9-kube-api-access-98gth\") pod \"70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5\" (UID: \"8cedfada-2004-4908-b8eb-cbc066c92dd9\") " pod="openstack-operators/70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5" Jan 26 10:59:16 crc kubenswrapper[5003]: I0126 10:59:16.541961 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8cedfada-2004-4908-b8eb-cbc066c92dd9-bundle\") pod \"70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5\" (UID: \"8cedfada-2004-4908-b8eb-cbc066c92dd9\") " pod="openstack-operators/70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5" Jan 26 10:59:16 crc kubenswrapper[5003]: I0126 10:59:16.542175 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8cedfada-2004-4908-b8eb-cbc066c92dd9-util\") pod \"70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5\" (UID: \"8cedfada-2004-4908-b8eb-cbc066c92dd9\") " pod="openstack-operators/70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5" Jan 26 10:59:16 crc kubenswrapper[5003]: I0126 10:59:16.559724 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98gth\" (UniqueName: \"kubernetes.io/projected/8cedfada-2004-4908-b8eb-cbc066c92dd9-kube-api-access-98gth\") pod \"70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5\" (UID: \"8cedfada-2004-4908-b8eb-cbc066c92dd9\") " pod="openstack-operators/70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5" Jan 26 10:59:16 crc kubenswrapper[5003]: I0126 10:59:16.656623 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5" Jan 26 10:59:17 crc kubenswrapper[5003]: I0126 10:59:17.076607 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5"] Jan 26 10:59:17 crc kubenswrapper[5003]: W0126 10:59:17.079974 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8cedfada_2004_4908_b8eb_cbc066c92dd9.slice/crio-24ef04db55b77eef07e20993f97e5d7ecaa0c1a6f303ee928b061b14fbf681a9 WatchSource:0}: Error finding container 24ef04db55b77eef07e20993f97e5d7ecaa0c1a6f303ee928b061b14fbf681a9: Status 404 returned error can't find the container with id 24ef04db55b77eef07e20993f97e5d7ecaa0c1a6f303ee928b061b14fbf681a9 Jan 26 10:59:17 crc kubenswrapper[5003]: I0126 10:59:17.294908 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5" event={"ID":"8cedfada-2004-4908-b8eb-cbc066c92dd9","Type":"ContainerStarted","Data":"1370bc94a109255dfb407d6634c6abce5f17de89a132815fb30a26877826d375"} Jan 26 10:59:17 crc kubenswrapper[5003]: I0126 10:59:17.295205 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5" event={"ID":"8cedfada-2004-4908-b8eb-cbc066c92dd9","Type":"ContainerStarted","Data":"24ef04db55b77eef07e20993f97e5d7ecaa0c1a6f303ee928b061b14fbf681a9"} Jan 26 10:59:18 crc kubenswrapper[5003]: I0126 10:59:18.301748 5003 generic.go:334] "Generic (PLEG): container finished" podID="8cedfada-2004-4908-b8eb-cbc066c92dd9" containerID="1370bc94a109255dfb407d6634c6abce5f17de89a132815fb30a26877826d375" exitCode=0 Jan 26 10:59:18 crc kubenswrapper[5003]: I0126 10:59:18.301787 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5" event={"ID":"8cedfada-2004-4908-b8eb-cbc066c92dd9","Type":"ContainerDied","Data":"1370bc94a109255dfb407d6634c6abce5f17de89a132815fb30a26877826d375"} Jan 26 10:59:19 crc kubenswrapper[5003]: I0126 10:59:19.309038 5003 generic.go:334] "Generic (PLEG): container finished" podID="e14a5862-4c4d-4a12-b110-03285b32d28c" containerID="ce3d943c634bc4c237bdb22ca4c56310b8e4f127467461e36d1d81f0e980aa04" exitCode=0 Jan 26 10:59:19 crc kubenswrapper[5003]: I0126 10:59:19.309145 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-db-sync-r2g92" event={"ID":"e14a5862-4c4d-4a12-b110-03285b32d28c","Type":"ContainerDied","Data":"ce3d943c634bc4c237bdb22ca4c56310b8e4f127467461e36d1d81f0e980aa04"} Jan 26 10:59:20 crc kubenswrapper[5003]: I0126 10:59:20.316219 5003 generic.go:334] "Generic (PLEG): container finished" podID="8cedfada-2004-4908-b8eb-cbc066c92dd9" containerID="3de834bcaa9728f6de3d5a7c498b95dc2eca6b23f753d5e240d7512bba4a6348" exitCode=0 Jan 26 10:59:20 crc kubenswrapper[5003]: I0126 10:59:20.316323 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5" event={"ID":"8cedfada-2004-4908-b8eb-cbc066c92dd9","Type":"ContainerDied","Data":"3de834bcaa9728f6de3d5a7c498b95dc2eca6b23f753d5e240d7512bba4a6348"} Jan 26 10:59:20 crc kubenswrapper[5003]: I0126 10:59:20.589559 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/keystone-db-sync-r2g92" Jan 26 10:59:20 crc kubenswrapper[5003]: I0126 10:59:20.709951 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e14a5862-4c4d-4a12-b110-03285b32d28c-config-data\") pod \"e14a5862-4c4d-4a12-b110-03285b32d28c\" (UID: \"e14a5862-4c4d-4a12-b110-03285b32d28c\") " Jan 26 10:59:20 crc kubenswrapper[5003]: I0126 10:59:20.710305 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2wnbl\" (UniqueName: \"kubernetes.io/projected/e14a5862-4c4d-4a12-b110-03285b32d28c-kube-api-access-2wnbl\") pod \"e14a5862-4c4d-4a12-b110-03285b32d28c\" (UID: \"e14a5862-4c4d-4a12-b110-03285b32d28c\") " Jan 26 10:59:20 crc kubenswrapper[5003]: I0126 10:59:20.715423 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e14a5862-4c4d-4a12-b110-03285b32d28c-kube-api-access-2wnbl" (OuterVolumeSpecName: "kube-api-access-2wnbl") pod "e14a5862-4c4d-4a12-b110-03285b32d28c" (UID: "e14a5862-4c4d-4a12-b110-03285b32d28c"). InnerVolumeSpecName "kube-api-access-2wnbl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:59:20 crc kubenswrapper[5003]: I0126 10:59:20.739667 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e14a5862-4c4d-4a12-b110-03285b32d28c-config-data" (OuterVolumeSpecName: "config-data") pod "e14a5862-4c4d-4a12-b110-03285b32d28c" (UID: "e14a5862-4c4d-4a12-b110-03285b32d28c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:59:20 crc kubenswrapper[5003]: I0126 10:59:20.812124 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2wnbl\" (UniqueName: \"kubernetes.io/projected/e14a5862-4c4d-4a12-b110-03285b32d28c-kube-api-access-2wnbl\") on node \"crc\" DevicePath \"\"" Jan 26 10:59:20 crc kubenswrapper[5003]: I0126 10:59:20.812154 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e14a5862-4c4d-4a12-b110-03285b32d28c-config-data\") on node \"crc\" DevicePath \"\"" Jan 26 10:59:21 crc kubenswrapper[5003]: I0126 10:59:21.329560 5003 generic.go:334] "Generic (PLEG): container finished" podID="8cedfada-2004-4908-b8eb-cbc066c92dd9" containerID="f60501dbdbcd028f28f0130101ec8e3936b9f30a713747db3a54ffd6c99af577" exitCode=0 Jan 26 10:59:21 crc kubenswrapper[5003]: I0126 10:59:21.329638 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5" event={"ID":"8cedfada-2004-4908-b8eb-cbc066c92dd9","Type":"ContainerDied","Data":"f60501dbdbcd028f28f0130101ec8e3936b9f30a713747db3a54ffd6c99af577"} Jan 26 10:59:21 crc kubenswrapper[5003]: I0126 10:59:21.332383 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-db-sync-r2g92" event={"ID":"e14a5862-4c4d-4a12-b110-03285b32d28c","Type":"ContainerDied","Data":"bba555df9847c3b0500ae60224a99820f13d4567342e76b24fada87a0b71788a"} Jan 26 10:59:21 crc kubenswrapper[5003]: I0126 10:59:21.332414 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bba555df9847c3b0500ae60224a99820f13d4567342e76b24fada87a0b71788a" Jan 26 10:59:21 crc kubenswrapper[5003]: I0126 10:59:21.332487 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/keystone-db-sync-r2g92" Jan 26 10:59:21 crc kubenswrapper[5003]: I0126 10:59:21.520703 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/keystone-bootstrap-vmrv9"] Jan 26 10:59:21 crc kubenswrapper[5003]: E0126 10:59:21.521122 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e14a5862-4c4d-4a12-b110-03285b32d28c" containerName="keystone-db-sync" Jan 26 10:59:21 crc kubenswrapper[5003]: I0126 10:59:21.521151 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e14a5862-4c4d-4a12-b110-03285b32d28c" containerName="keystone-db-sync" Jan 26 10:59:21 crc kubenswrapper[5003]: I0126 10:59:21.521445 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e14a5862-4c4d-4a12-b110-03285b32d28c" containerName="keystone-db-sync" Jan 26 10:59:21 crc kubenswrapper[5003]: I0126 10:59:21.522130 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone-bootstrap-vmrv9" Jan 26 10:59:21 crc kubenswrapper[5003]: I0126 10:59:21.524600 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone-config-data" Jan 26 10:59:21 crc kubenswrapper[5003]: I0126 10:59:21.529112 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone-scripts" Jan 26 10:59:21 crc kubenswrapper[5003]: I0126 10:59:21.529332 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone" Jan 26 10:59:21 crc kubenswrapper[5003]: I0126 10:59:21.531409 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone-keystone-dockercfg-jcz5p" Jan 26 10:59:21 crc kubenswrapper[5003]: I0126 10:59:21.531625 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"osp-secret" Jan 26 10:59:21 crc kubenswrapper[5003]: I0126 10:59:21.535559 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone-bootstrap-vmrv9"] Jan 26 10:59:21 crc kubenswrapper[5003]: I0126 10:59:21.627156 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f577648-3365-45a7-99d9-676747c83c31-scripts\") pod \"keystone-bootstrap-vmrv9\" (UID: \"3f577648-3365-45a7-99d9-676747c83c31\") " pod="swift-kuttl-tests/keystone-bootstrap-vmrv9" Jan 26 10:59:21 crc kubenswrapper[5003]: I0126 10:59:21.627221 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f577648-3365-45a7-99d9-676747c83c31-config-data\") pod \"keystone-bootstrap-vmrv9\" (UID: \"3f577648-3365-45a7-99d9-676747c83c31\") " pod="swift-kuttl-tests/keystone-bootstrap-vmrv9" Jan 26 10:59:21 crc kubenswrapper[5003]: I0126 10:59:21.627259 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xhvq\" (UniqueName: \"kubernetes.io/projected/3f577648-3365-45a7-99d9-676747c83c31-kube-api-access-4xhvq\") pod \"keystone-bootstrap-vmrv9\" (UID: \"3f577648-3365-45a7-99d9-676747c83c31\") " pod="swift-kuttl-tests/keystone-bootstrap-vmrv9" Jan 26 10:59:21 crc kubenswrapper[5003]: I0126 10:59:21.627326 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3f577648-3365-45a7-99d9-676747c83c31-credential-keys\") pod 
\"keystone-bootstrap-vmrv9\" (UID: \"3f577648-3365-45a7-99d9-676747c83c31\") " pod="swift-kuttl-tests/keystone-bootstrap-vmrv9" Jan 26 10:59:21 crc kubenswrapper[5003]: I0126 10:59:21.627369 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3f577648-3365-45a7-99d9-676747c83c31-fernet-keys\") pod \"keystone-bootstrap-vmrv9\" (UID: \"3f577648-3365-45a7-99d9-676747c83c31\") " pod="swift-kuttl-tests/keystone-bootstrap-vmrv9" Jan 26 10:59:21 crc kubenswrapper[5003]: I0126 10:59:21.728273 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3f577648-3365-45a7-99d9-676747c83c31-fernet-keys\") pod \"keystone-bootstrap-vmrv9\" (UID: \"3f577648-3365-45a7-99d9-676747c83c31\") " pod="swift-kuttl-tests/keystone-bootstrap-vmrv9" Jan 26 10:59:21 crc kubenswrapper[5003]: I0126 10:59:21.728386 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f577648-3365-45a7-99d9-676747c83c31-scripts\") pod \"keystone-bootstrap-vmrv9\" (UID: \"3f577648-3365-45a7-99d9-676747c83c31\") " pod="swift-kuttl-tests/keystone-bootstrap-vmrv9" Jan 26 10:59:21 crc kubenswrapper[5003]: I0126 10:59:21.728413 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f577648-3365-45a7-99d9-676747c83c31-config-data\") pod \"keystone-bootstrap-vmrv9\" (UID: \"3f577648-3365-45a7-99d9-676747c83c31\") " pod="swift-kuttl-tests/keystone-bootstrap-vmrv9" Jan 26 10:59:21 crc kubenswrapper[5003]: I0126 10:59:21.728442 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xhvq\" (UniqueName: \"kubernetes.io/projected/3f577648-3365-45a7-99d9-676747c83c31-kube-api-access-4xhvq\") pod \"keystone-bootstrap-vmrv9\" (UID: \"3f577648-3365-45a7-99d9-676747c83c31\") " pod="swift-kuttl-tests/keystone-bootstrap-vmrv9" Jan 26 10:59:21 crc kubenswrapper[5003]: I0126 10:59:21.728492 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3f577648-3365-45a7-99d9-676747c83c31-credential-keys\") pod \"keystone-bootstrap-vmrv9\" (UID: \"3f577648-3365-45a7-99d9-676747c83c31\") " pod="swift-kuttl-tests/keystone-bootstrap-vmrv9" Jan 26 10:59:21 crc kubenswrapper[5003]: I0126 10:59:21.731924 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3f577648-3365-45a7-99d9-676747c83c31-credential-keys\") pod \"keystone-bootstrap-vmrv9\" (UID: \"3f577648-3365-45a7-99d9-676747c83c31\") " pod="swift-kuttl-tests/keystone-bootstrap-vmrv9" Jan 26 10:59:21 crc kubenswrapper[5003]: I0126 10:59:21.732189 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f577648-3365-45a7-99d9-676747c83c31-config-data\") pod \"keystone-bootstrap-vmrv9\" (UID: \"3f577648-3365-45a7-99d9-676747c83c31\") " pod="swift-kuttl-tests/keystone-bootstrap-vmrv9" Jan 26 10:59:21 crc kubenswrapper[5003]: I0126 10:59:21.732233 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3f577648-3365-45a7-99d9-676747c83c31-fernet-keys\") pod \"keystone-bootstrap-vmrv9\" (UID: \"3f577648-3365-45a7-99d9-676747c83c31\") " 
pod="swift-kuttl-tests/keystone-bootstrap-vmrv9" Jan 26 10:59:21 crc kubenswrapper[5003]: I0126 10:59:21.745598 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f577648-3365-45a7-99d9-676747c83c31-scripts\") pod \"keystone-bootstrap-vmrv9\" (UID: \"3f577648-3365-45a7-99d9-676747c83c31\") " pod="swift-kuttl-tests/keystone-bootstrap-vmrv9" Jan 26 10:59:21 crc kubenswrapper[5003]: I0126 10:59:21.750089 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xhvq\" (UniqueName: \"kubernetes.io/projected/3f577648-3365-45a7-99d9-676747c83c31-kube-api-access-4xhvq\") pod \"keystone-bootstrap-vmrv9\" (UID: \"3f577648-3365-45a7-99d9-676747c83c31\") " pod="swift-kuttl-tests/keystone-bootstrap-vmrv9" Jan 26 10:59:21 crc kubenswrapper[5003]: I0126 10:59:21.843074 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone-bootstrap-vmrv9" Jan 26 10:59:22 crc kubenswrapper[5003]: I0126 10:59:22.258113 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone-bootstrap-vmrv9"] Jan 26 10:59:22 crc kubenswrapper[5003]: W0126 10:59:22.263104 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3f577648_3365_45a7_99d9_676747c83c31.slice/crio-1a56886fca22f66356b5df37dd7ad0840880cdb4a521764200c22ae0f0929c21 WatchSource:0}: Error finding container 1a56886fca22f66356b5df37dd7ad0840880cdb4a521764200c22ae0f0929c21: Status 404 returned error can't find the container with id 1a56886fca22f66356b5df37dd7ad0840880cdb4a521764200c22ae0f0929c21 Jan 26 10:59:22 crc kubenswrapper[5003]: I0126 10:59:22.338895 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-bootstrap-vmrv9" event={"ID":"3f577648-3365-45a7-99d9-676747c83c31","Type":"ContainerStarted","Data":"1a56886fca22f66356b5df37dd7ad0840880cdb4a521764200c22ae0f0929c21"} Jan 26 10:59:22 crc kubenswrapper[5003]: I0126 10:59:22.592180 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5" Jan 26 10:59:22 crc kubenswrapper[5003]: I0126 10:59:22.641564 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8cedfada-2004-4908-b8eb-cbc066c92dd9-util\") pod \"8cedfada-2004-4908-b8eb-cbc066c92dd9\" (UID: \"8cedfada-2004-4908-b8eb-cbc066c92dd9\") " Jan 26 10:59:22 crc kubenswrapper[5003]: I0126 10:59:22.641675 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-98gth\" (UniqueName: \"kubernetes.io/projected/8cedfada-2004-4908-b8eb-cbc066c92dd9-kube-api-access-98gth\") pod \"8cedfada-2004-4908-b8eb-cbc066c92dd9\" (UID: \"8cedfada-2004-4908-b8eb-cbc066c92dd9\") " Jan 26 10:59:22 crc kubenswrapper[5003]: I0126 10:59:22.641739 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8cedfada-2004-4908-b8eb-cbc066c92dd9-bundle\") pod \"8cedfada-2004-4908-b8eb-cbc066c92dd9\" (UID: \"8cedfada-2004-4908-b8eb-cbc066c92dd9\") " Jan 26 10:59:22 crc kubenswrapper[5003]: I0126 10:59:22.642829 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8cedfada-2004-4908-b8eb-cbc066c92dd9-bundle" (OuterVolumeSpecName: "bundle") pod "8cedfada-2004-4908-b8eb-cbc066c92dd9" (UID: "8cedfada-2004-4908-b8eb-cbc066c92dd9"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:59:22 crc kubenswrapper[5003]: I0126 10:59:22.643115 5003 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8cedfada-2004-4908-b8eb-cbc066c92dd9-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 10:59:22 crc kubenswrapper[5003]: I0126 10:59:22.645658 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cedfada-2004-4908-b8eb-cbc066c92dd9-kube-api-access-98gth" (OuterVolumeSpecName: "kube-api-access-98gth") pod "8cedfada-2004-4908-b8eb-cbc066c92dd9" (UID: "8cedfada-2004-4908-b8eb-cbc066c92dd9"). InnerVolumeSpecName "kube-api-access-98gth". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:59:22 crc kubenswrapper[5003]: I0126 10:59:22.655172 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8cedfada-2004-4908-b8eb-cbc066c92dd9-util" (OuterVolumeSpecName: "util") pod "8cedfada-2004-4908-b8eb-cbc066c92dd9" (UID: "8cedfada-2004-4908-b8eb-cbc066c92dd9"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 10:59:22 crc kubenswrapper[5003]: I0126 10:59:22.748994 5003 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8cedfada-2004-4908-b8eb-cbc066c92dd9-util\") on node \"crc\" DevicePath \"\"" Jan 26 10:59:22 crc kubenswrapper[5003]: I0126 10:59:22.749025 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-98gth\" (UniqueName: \"kubernetes.io/projected/8cedfada-2004-4908-b8eb-cbc066c92dd9-kube-api-access-98gth\") on node \"crc\" DevicePath \"\"" Jan 26 10:59:23 crc kubenswrapper[5003]: I0126 10:59:23.348426 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5" event={"ID":"8cedfada-2004-4908-b8eb-cbc066c92dd9","Type":"ContainerDied","Data":"24ef04db55b77eef07e20993f97e5d7ecaa0c1a6f303ee928b061b14fbf681a9"} Jan 26 10:59:23 crc kubenswrapper[5003]: I0126 10:59:23.348709 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="24ef04db55b77eef07e20993f97e5d7ecaa0c1a6f303ee928b061b14fbf681a9" Jan 26 10:59:23 crc kubenswrapper[5003]: I0126 10:59:23.348513 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5" Jan 26 10:59:23 crc kubenswrapper[5003]: I0126 10:59:23.354075 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-bootstrap-vmrv9" event={"ID":"3f577648-3365-45a7-99d9-676747c83c31","Type":"ContainerStarted","Data":"96b5fa51591d730dbcb0f4e2b6c193455cdaf0094140fe5682f912c818aa1316"} Jan 26 10:59:23 crc kubenswrapper[5003]: I0126 10:59:23.370907 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/keystone-bootstrap-vmrv9" podStartSLOduration=2.370890966 podStartE2EDuration="2.370890966s" podCreationTimestamp="2026-01-26 10:59:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:59:23.367820078 +0000 UTC m=+978.909045659" watchObservedRunningTime="2026-01-26 10:59:23.370890966 +0000 UTC m=+978.912116527" Jan 26 10:59:25 crc kubenswrapper[5003]: I0126 10:59:25.369023 5003 generic.go:334] "Generic (PLEG): container finished" podID="3f577648-3365-45a7-99d9-676747c83c31" containerID="96b5fa51591d730dbcb0f4e2b6c193455cdaf0094140fe5682f912c818aa1316" exitCode=0 Jan 26 10:59:25 crc kubenswrapper[5003]: I0126 10:59:25.369095 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-bootstrap-vmrv9" event={"ID":"3f577648-3365-45a7-99d9-676747c83c31","Type":"ContainerDied","Data":"96b5fa51591d730dbcb0f4e2b6c193455cdaf0094140fe5682f912c818aa1316"} Jan 26 10:59:26 crc kubenswrapper[5003]: I0126 10:59:26.732463 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/keystone-bootstrap-vmrv9" Jan 26 10:59:26 crc kubenswrapper[5003]: I0126 10:59:26.810484 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f577648-3365-45a7-99d9-676747c83c31-scripts\") pod \"3f577648-3365-45a7-99d9-676747c83c31\" (UID: \"3f577648-3365-45a7-99d9-676747c83c31\") " Jan 26 10:59:26 crc kubenswrapper[5003]: I0126 10:59:26.810525 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3f577648-3365-45a7-99d9-676747c83c31-credential-keys\") pod \"3f577648-3365-45a7-99d9-676747c83c31\" (UID: \"3f577648-3365-45a7-99d9-676747c83c31\") " Jan 26 10:59:26 crc kubenswrapper[5003]: I0126 10:59:26.810600 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3f577648-3365-45a7-99d9-676747c83c31-fernet-keys\") pod \"3f577648-3365-45a7-99d9-676747c83c31\" (UID: \"3f577648-3365-45a7-99d9-676747c83c31\") " Jan 26 10:59:26 crc kubenswrapper[5003]: I0126 10:59:26.810645 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f577648-3365-45a7-99d9-676747c83c31-config-data\") pod \"3f577648-3365-45a7-99d9-676747c83c31\" (UID: \"3f577648-3365-45a7-99d9-676747c83c31\") " Jan 26 10:59:26 crc kubenswrapper[5003]: I0126 10:59:26.810724 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4xhvq\" (UniqueName: \"kubernetes.io/projected/3f577648-3365-45a7-99d9-676747c83c31-kube-api-access-4xhvq\") pod \"3f577648-3365-45a7-99d9-676747c83c31\" (UID: \"3f577648-3365-45a7-99d9-676747c83c31\") " Jan 26 10:59:26 crc kubenswrapper[5003]: I0126 10:59:26.816851 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f577648-3365-45a7-99d9-676747c83c31-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "3f577648-3365-45a7-99d9-676747c83c31" (UID: "3f577648-3365-45a7-99d9-676747c83c31"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:59:26 crc kubenswrapper[5003]: I0126 10:59:26.817519 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f577648-3365-45a7-99d9-676747c83c31-scripts" (OuterVolumeSpecName: "scripts") pod "3f577648-3365-45a7-99d9-676747c83c31" (UID: "3f577648-3365-45a7-99d9-676747c83c31"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:59:26 crc kubenswrapper[5003]: I0126 10:59:26.818056 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f577648-3365-45a7-99d9-676747c83c31-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "3f577648-3365-45a7-99d9-676747c83c31" (UID: "3f577648-3365-45a7-99d9-676747c83c31"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:59:26 crc kubenswrapper[5003]: I0126 10:59:26.818892 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f577648-3365-45a7-99d9-676747c83c31-kube-api-access-4xhvq" (OuterVolumeSpecName: "kube-api-access-4xhvq") pod "3f577648-3365-45a7-99d9-676747c83c31" (UID: "3f577648-3365-45a7-99d9-676747c83c31"). InnerVolumeSpecName "kube-api-access-4xhvq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:59:26 crc kubenswrapper[5003]: I0126 10:59:26.835685 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f577648-3365-45a7-99d9-676747c83c31-config-data" (OuterVolumeSpecName: "config-data") pod "3f577648-3365-45a7-99d9-676747c83c31" (UID: "3f577648-3365-45a7-99d9-676747c83c31"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 10:59:26 crc kubenswrapper[5003]: I0126 10:59:26.912242 5003 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3f577648-3365-45a7-99d9-676747c83c31-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 26 10:59:26 crc kubenswrapper[5003]: I0126 10:59:26.912301 5003 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3f577648-3365-45a7-99d9-676747c83c31-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 26 10:59:26 crc kubenswrapper[5003]: I0126 10:59:26.912314 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f577648-3365-45a7-99d9-676747c83c31-config-data\") on node \"crc\" DevicePath \"\"" Jan 26 10:59:26 crc kubenswrapper[5003]: I0126 10:59:26.912328 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4xhvq\" (UniqueName: \"kubernetes.io/projected/3f577648-3365-45a7-99d9-676747c83c31-kube-api-access-4xhvq\") on node \"crc\" DevicePath \"\"" Jan 26 10:59:26 crc kubenswrapper[5003]: I0126 10:59:26.912342 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f577648-3365-45a7-99d9-676747c83c31-scripts\") on node \"crc\" DevicePath \"\"" Jan 26 10:59:27 crc kubenswrapper[5003]: I0126 10:59:27.384496 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-bootstrap-vmrv9" event={"ID":"3f577648-3365-45a7-99d9-676747c83c31","Type":"ContainerDied","Data":"1a56886fca22f66356b5df37dd7ad0840880cdb4a521764200c22ae0f0929c21"} Jan 26 10:59:27 crc kubenswrapper[5003]: I0126 10:59:27.384535 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1a56886fca22f66356b5df37dd7ad0840880cdb4a521764200c22ae0f0929c21" Jan 26 10:59:27 crc kubenswrapper[5003]: I0126 10:59:27.384637 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/keystone-bootstrap-vmrv9" Jan 26 10:59:27 crc kubenswrapper[5003]: I0126 10:59:27.458440 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/keystone-f9445869f-dzj8v"] Jan 26 10:59:27 crc kubenswrapper[5003]: E0126 10:59:27.458712 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cedfada-2004-4908-b8eb-cbc066c92dd9" containerName="util" Jan 26 10:59:27 crc kubenswrapper[5003]: I0126 10:59:27.458729 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cedfada-2004-4908-b8eb-cbc066c92dd9" containerName="util" Jan 26 10:59:27 crc kubenswrapper[5003]: E0126 10:59:27.458741 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cedfada-2004-4908-b8eb-cbc066c92dd9" containerName="pull" Jan 26 10:59:27 crc kubenswrapper[5003]: I0126 10:59:27.458748 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cedfada-2004-4908-b8eb-cbc066c92dd9" containerName="pull" Jan 26 10:59:27 crc kubenswrapper[5003]: E0126 10:59:27.458759 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cedfada-2004-4908-b8eb-cbc066c92dd9" containerName="extract" Jan 26 10:59:27 crc kubenswrapper[5003]: I0126 10:59:27.458766 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cedfada-2004-4908-b8eb-cbc066c92dd9" containerName="extract" Jan 26 10:59:27 crc kubenswrapper[5003]: E0126 10:59:27.458779 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f577648-3365-45a7-99d9-676747c83c31" containerName="keystone-bootstrap" Jan 26 10:59:27 crc kubenswrapper[5003]: I0126 10:59:27.458786 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f577648-3365-45a7-99d9-676747c83c31" containerName="keystone-bootstrap" Jan 26 10:59:27 crc kubenswrapper[5003]: I0126 10:59:27.458928 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f577648-3365-45a7-99d9-676747c83c31" containerName="keystone-bootstrap" Jan 26 10:59:27 crc kubenswrapper[5003]: I0126 10:59:27.458941 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cedfada-2004-4908-b8eb-cbc066c92dd9" containerName="extract" Jan 26 10:59:27 crc kubenswrapper[5003]: I0126 10:59:27.459431 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/keystone-f9445869f-dzj8v" Jan 26 10:59:27 crc kubenswrapper[5003]: I0126 10:59:27.461858 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone-keystone-dockercfg-jcz5p" Jan 26 10:59:27 crc kubenswrapper[5003]: I0126 10:59:27.462187 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone-scripts" Jan 26 10:59:27 crc kubenswrapper[5003]: I0126 10:59:27.462436 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone" Jan 26 10:59:27 crc kubenswrapper[5003]: I0126 10:59:27.465247 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone-config-data" Jan 26 10:59:27 crc kubenswrapper[5003]: I0126 10:59:27.472675 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone-f9445869f-dzj8v"] Jan 26 10:59:27 crc kubenswrapper[5003]: I0126 10:59:27.518557 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gk5zs\" (UniqueName: \"kubernetes.io/projected/93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c-kube-api-access-gk5zs\") pod \"keystone-f9445869f-dzj8v\" (UID: \"93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c\") " pod="swift-kuttl-tests/keystone-f9445869f-dzj8v" Jan 26 10:59:27 crc kubenswrapper[5003]: I0126 10:59:27.518638 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c-credential-keys\") pod \"keystone-f9445869f-dzj8v\" (UID: \"93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c\") " pod="swift-kuttl-tests/keystone-f9445869f-dzj8v" Jan 26 10:59:27 crc kubenswrapper[5003]: I0126 10:59:27.518722 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c-scripts\") pod \"keystone-f9445869f-dzj8v\" (UID: \"93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c\") " pod="swift-kuttl-tests/keystone-f9445869f-dzj8v" Jan 26 10:59:27 crc kubenswrapper[5003]: I0126 10:59:27.518754 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c-fernet-keys\") pod \"keystone-f9445869f-dzj8v\" (UID: \"93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c\") " pod="swift-kuttl-tests/keystone-f9445869f-dzj8v" Jan 26 10:59:27 crc kubenswrapper[5003]: I0126 10:59:27.518775 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c-config-data\") pod \"keystone-f9445869f-dzj8v\" (UID: \"93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c\") " pod="swift-kuttl-tests/keystone-f9445869f-dzj8v" Jan 26 10:59:27 crc kubenswrapper[5003]: I0126 10:59:27.619506 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c-credential-keys\") pod \"keystone-f9445869f-dzj8v\" (UID: \"93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c\") " pod="swift-kuttl-tests/keystone-f9445869f-dzj8v" Jan 26 10:59:27 crc kubenswrapper[5003]: I0126 10:59:27.619604 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c-scripts\") pod \"keystone-f9445869f-dzj8v\" (UID: \"93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c\") " pod="swift-kuttl-tests/keystone-f9445869f-dzj8v" Jan 26 10:59:27 crc kubenswrapper[5003]: I0126 10:59:27.619632 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c-fernet-keys\") pod \"keystone-f9445869f-dzj8v\" (UID: \"93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c\") " pod="swift-kuttl-tests/keystone-f9445869f-dzj8v" Jan 26 10:59:27 crc kubenswrapper[5003]: I0126 10:59:27.619651 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c-config-data\") pod \"keystone-f9445869f-dzj8v\" (UID: \"93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c\") " pod="swift-kuttl-tests/keystone-f9445869f-dzj8v" Jan 26 10:59:27 crc kubenswrapper[5003]: I0126 10:59:27.619675 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gk5zs\" (UniqueName: \"kubernetes.io/projected/93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c-kube-api-access-gk5zs\") pod \"keystone-f9445869f-dzj8v\" (UID: \"93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c\") " pod="swift-kuttl-tests/keystone-f9445869f-dzj8v" Jan 26 10:59:27 crc kubenswrapper[5003]: I0126 10:59:27.623938 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c-credential-keys\") pod \"keystone-f9445869f-dzj8v\" (UID: \"93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c\") " pod="swift-kuttl-tests/keystone-f9445869f-dzj8v" Jan 26 10:59:27 crc kubenswrapper[5003]: I0126 10:59:27.624042 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c-fernet-keys\") pod \"keystone-f9445869f-dzj8v\" (UID: \"93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c\") " pod="swift-kuttl-tests/keystone-f9445869f-dzj8v" Jan 26 10:59:27 crc kubenswrapper[5003]: I0126 10:59:27.624909 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c-config-data\") pod \"keystone-f9445869f-dzj8v\" (UID: \"93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c\") " pod="swift-kuttl-tests/keystone-f9445869f-dzj8v" Jan 26 10:59:27 crc kubenswrapper[5003]: I0126 10:59:27.625534 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c-scripts\") pod \"keystone-f9445869f-dzj8v\" (UID: \"93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c\") " pod="swift-kuttl-tests/keystone-f9445869f-dzj8v" Jan 26 10:59:27 crc kubenswrapper[5003]: I0126 10:59:27.638824 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gk5zs\" (UniqueName: \"kubernetes.io/projected/93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c-kube-api-access-gk5zs\") pod \"keystone-f9445869f-dzj8v\" (UID: \"93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c\") " pod="swift-kuttl-tests/keystone-f9445869f-dzj8v" Jan 26 10:59:27 crc kubenswrapper[5003]: I0126 10:59:27.774587 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/keystone-f9445869f-dzj8v" Jan 26 10:59:28 crc kubenswrapper[5003]: I0126 10:59:28.252603 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone-f9445869f-dzj8v"] Jan 26 10:59:28 crc kubenswrapper[5003]: I0126 10:59:28.392241 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-f9445869f-dzj8v" event={"ID":"93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c","Type":"ContainerStarted","Data":"a7e7db0eed8633ec3556471a345335d661b24273d9f37cf02b1f4bc34ad5ccd2"} Jan 26 10:59:29 crc kubenswrapper[5003]: I0126 10:59:29.399625 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-f9445869f-dzj8v" event={"ID":"93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c","Type":"ContainerStarted","Data":"6a0f55184c94033a37ba26dd8dcfbbdf62fb8034476351b07a7e2449325acb9a"} Jan 26 10:59:29 crc kubenswrapper[5003]: I0126 10:59:29.400996 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/keystone-f9445869f-dzj8v" Jan 26 10:59:29 crc kubenswrapper[5003]: I0126 10:59:29.418933 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/keystone-f9445869f-dzj8v" podStartSLOduration=2.418909418 podStartE2EDuration="2.418909418s" podCreationTimestamp="2026-01-26 10:59:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 10:59:29.414870662 +0000 UTC m=+984.956096223" watchObservedRunningTime="2026-01-26 10:59:29.418909418 +0000 UTC m=+984.960134979" Jan 26 10:59:38 crc kubenswrapper[5003]: I0126 10:59:38.386125 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-759678c54b-zj6mg"] Jan 26 10:59:38 crc kubenswrapper[5003]: I0126 10:59:38.388554 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-759678c54b-zj6mg" Jan 26 10:59:38 crc kubenswrapper[5003]: I0126 10:59:38.391894 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-5hsfk" Jan 26 10:59:38 crc kubenswrapper[5003]: I0126 10:59:38.392174 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-service-cert" Jan 26 10:59:38 crc kubenswrapper[5003]: I0126 10:59:38.458834 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-759678c54b-zj6mg"] Jan 26 10:59:38 crc kubenswrapper[5003]: I0126 10:59:38.480859 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1fedfbd6-7026-49ff-b4cd-bc52a093e02a-apiservice-cert\") pod \"barbican-operator-controller-manager-759678c54b-zj6mg\" (UID: \"1fedfbd6-7026-49ff-b4cd-bc52a093e02a\") " pod="openstack-operators/barbican-operator-controller-manager-759678c54b-zj6mg" Jan 26 10:59:38 crc kubenswrapper[5003]: I0126 10:59:38.481176 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqblg\" (UniqueName: \"kubernetes.io/projected/1fedfbd6-7026-49ff-b4cd-bc52a093e02a-kube-api-access-vqblg\") pod \"barbican-operator-controller-manager-759678c54b-zj6mg\" (UID: \"1fedfbd6-7026-49ff-b4cd-bc52a093e02a\") " pod="openstack-operators/barbican-operator-controller-manager-759678c54b-zj6mg" Jan 26 10:59:38 crc kubenswrapper[5003]: I0126 10:59:38.481329 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1fedfbd6-7026-49ff-b4cd-bc52a093e02a-webhook-cert\") pod \"barbican-operator-controller-manager-759678c54b-zj6mg\" (UID: \"1fedfbd6-7026-49ff-b4cd-bc52a093e02a\") " pod="openstack-operators/barbican-operator-controller-manager-759678c54b-zj6mg" Jan 26 10:59:38 crc kubenswrapper[5003]: I0126 10:59:38.582517 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqblg\" (UniqueName: \"kubernetes.io/projected/1fedfbd6-7026-49ff-b4cd-bc52a093e02a-kube-api-access-vqblg\") pod \"barbican-operator-controller-manager-759678c54b-zj6mg\" (UID: \"1fedfbd6-7026-49ff-b4cd-bc52a093e02a\") " pod="openstack-operators/barbican-operator-controller-manager-759678c54b-zj6mg" Jan 26 10:59:38 crc kubenswrapper[5003]: I0126 10:59:38.582589 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1fedfbd6-7026-49ff-b4cd-bc52a093e02a-webhook-cert\") pod \"barbican-operator-controller-manager-759678c54b-zj6mg\" (UID: \"1fedfbd6-7026-49ff-b4cd-bc52a093e02a\") " pod="openstack-operators/barbican-operator-controller-manager-759678c54b-zj6mg" Jan 26 10:59:38 crc kubenswrapper[5003]: I0126 10:59:38.582662 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1fedfbd6-7026-49ff-b4cd-bc52a093e02a-apiservice-cert\") pod \"barbican-operator-controller-manager-759678c54b-zj6mg\" (UID: \"1fedfbd6-7026-49ff-b4cd-bc52a093e02a\") " pod="openstack-operators/barbican-operator-controller-manager-759678c54b-zj6mg" Jan 26 10:59:38 crc kubenswrapper[5003]: I0126 10:59:38.594990 5003 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1fedfbd6-7026-49ff-b4cd-bc52a093e02a-webhook-cert\") pod \"barbican-operator-controller-manager-759678c54b-zj6mg\" (UID: \"1fedfbd6-7026-49ff-b4cd-bc52a093e02a\") " pod="openstack-operators/barbican-operator-controller-manager-759678c54b-zj6mg" Jan 26 10:59:38 crc kubenswrapper[5003]: I0126 10:59:38.597083 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1fedfbd6-7026-49ff-b4cd-bc52a093e02a-apiservice-cert\") pod \"barbican-operator-controller-manager-759678c54b-zj6mg\" (UID: \"1fedfbd6-7026-49ff-b4cd-bc52a093e02a\") " pod="openstack-operators/barbican-operator-controller-manager-759678c54b-zj6mg" Jan 26 10:59:38 crc kubenswrapper[5003]: I0126 10:59:38.597705 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqblg\" (UniqueName: \"kubernetes.io/projected/1fedfbd6-7026-49ff-b4cd-bc52a093e02a-kube-api-access-vqblg\") pod \"barbican-operator-controller-manager-759678c54b-zj6mg\" (UID: \"1fedfbd6-7026-49ff-b4cd-bc52a093e02a\") " pod="openstack-operators/barbican-operator-controller-manager-759678c54b-zj6mg" Jan 26 10:59:38 crc kubenswrapper[5003]: I0126 10:59:38.715839 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-759678c54b-zj6mg" Jan 26 10:59:39 crc kubenswrapper[5003]: I0126 10:59:39.181690 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-759678c54b-zj6mg"] Jan 26 10:59:39 crc kubenswrapper[5003]: I0126 10:59:39.469507 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-759678c54b-zj6mg" event={"ID":"1fedfbd6-7026-49ff-b4cd-bc52a093e02a","Type":"ContainerStarted","Data":"6a2e4058eed0ace21adc161fc1863f327601179241e40a0921b449cb01448749"} Jan 26 10:59:42 crc kubenswrapper[5003]: I0126 10:59:42.488057 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-759678c54b-zj6mg" event={"ID":"1fedfbd6-7026-49ff-b4cd-bc52a093e02a","Type":"ContainerStarted","Data":"8360016c2d0f656d8d226388fceecd8f45d0f8acf100692ff2ffbf98ad3c54d8"} Jan 26 10:59:42 crc kubenswrapper[5003]: I0126 10:59:42.488637 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-759678c54b-zj6mg" Jan 26 10:59:42 crc kubenswrapper[5003]: I0126 10:59:42.504435 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-759678c54b-zj6mg" podStartSLOduration=1.984604794 podStartE2EDuration="4.504418902s" podCreationTimestamp="2026-01-26 10:59:38 +0000 UTC" firstStartedPulling="2026-01-26 10:59:39.197132886 +0000 UTC m=+994.738358447" lastFinishedPulling="2026-01-26 10:59:41.716946984 +0000 UTC m=+997.258172555" observedRunningTime="2026-01-26 10:59:42.502598919 +0000 UTC m=+998.043824490" watchObservedRunningTime="2026-01-26 10:59:42.504418902 +0000 UTC m=+998.045644463" Jan 26 10:59:48 crc kubenswrapper[5003]: I0126 10:59:48.719845 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-759678c54b-zj6mg" Jan 26 10:59:50 crc kubenswrapper[5003]: I0126 10:59:50.053407 5003 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["swift-kuttl-tests/barbican-db-create-9cxv7"] Jan 26 10:59:50 crc kubenswrapper[5003]: I0126 10:59:50.054512 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-db-create-9cxv7" Jan 26 10:59:50 crc kubenswrapper[5003]: I0126 10:59:50.060826 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/barbican-cbfe-account-create-update-dzdjr"] Jan 26 10:59:50 crc kubenswrapper[5003]: I0126 10:59:50.062015 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-cbfe-account-create-update-dzdjr" Jan 26 10:59:50 crc kubenswrapper[5003]: I0126 10:59:50.064913 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"barbican-db-secret" Jan 26 10:59:50 crc kubenswrapper[5003]: I0126 10:59:50.072923 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-cbfe-account-create-update-dzdjr"] Jan 26 10:59:50 crc kubenswrapper[5003]: I0126 10:59:50.085796 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-db-create-9cxv7"] Jan 26 10:59:50 crc kubenswrapper[5003]: I0126 10:59:50.153878 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvlth\" (UniqueName: \"kubernetes.io/projected/a7e67ef6-3327-4473-a44e-3c65226ce5db-kube-api-access-xvlth\") pod \"barbican-db-create-9cxv7\" (UID: \"a7e67ef6-3327-4473-a44e-3c65226ce5db\") " pod="swift-kuttl-tests/barbican-db-create-9cxv7" Jan 26 10:59:50 crc kubenswrapper[5003]: I0126 10:59:50.154214 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ncb26\" (UniqueName: \"kubernetes.io/projected/ac2063ed-42ef-4188-826b-a69356f49a65-kube-api-access-ncb26\") pod \"barbican-cbfe-account-create-update-dzdjr\" (UID: \"ac2063ed-42ef-4188-826b-a69356f49a65\") " pod="swift-kuttl-tests/barbican-cbfe-account-create-update-dzdjr" Jan 26 10:59:50 crc kubenswrapper[5003]: I0126 10:59:50.154327 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ac2063ed-42ef-4188-826b-a69356f49a65-operator-scripts\") pod \"barbican-cbfe-account-create-update-dzdjr\" (UID: \"ac2063ed-42ef-4188-826b-a69356f49a65\") " pod="swift-kuttl-tests/barbican-cbfe-account-create-update-dzdjr" Jan 26 10:59:50 crc kubenswrapper[5003]: I0126 10:59:50.154349 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a7e67ef6-3327-4473-a44e-3c65226ce5db-operator-scripts\") pod \"barbican-db-create-9cxv7\" (UID: \"a7e67ef6-3327-4473-a44e-3c65226ce5db\") " pod="swift-kuttl-tests/barbican-db-create-9cxv7" Jan 26 10:59:50 crc kubenswrapper[5003]: I0126 10:59:50.256099 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ac2063ed-42ef-4188-826b-a69356f49a65-operator-scripts\") pod \"barbican-cbfe-account-create-update-dzdjr\" (UID: \"ac2063ed-42ef-4188-826b-a69356f49a65\") " pod="swift-kuttl-tests/barbican-cbfe-account-create-update-dzdjr" Jan 26 10:59:50 crc kubenswrapper[5003]: I0126 10:59:50.256143 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/a7e67ef6-3327-4473-a44e-3c65226ce5db-operator-scripts\") pod \"barbican-db-create-9cxv7\" (UID: \"a7e67ef6-3327-4473-a44e-3c65226ce5db\") " pod="swift-kuttl-tests/barbican-db-create-9cxv7" Jan 26 10:59:50 crc kubenswrapper[5003]: I0126 10:59:50.256184 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvlth\" (UniqueName: \"kubernetes.io/projected/a7e67ef6-3327-4473-a44e-3c65226ce5db-kube-api-access-xvlth\") pod \"barbican-db-create-9cxv7\" (UID: \"a7e67ef6-3327-4473-a44e-3c65226ce5db\") " pod="swift-kuttl-tests/barbican-db-create-9cxv7" Jan 26 10:59:50 crc kubenswrapper[5003]: I0126 10:59:50.256536 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ncb26\" (UniqueName: \"kubernetes.io/projected/ac2063ed-42ef-4188-826b-a69356f49a65-kube-api-access-ncb26\") pod \"barbican-cbfe-account-create-update-dzdjr\" (UID: \"ac2063ed-42ef-4188-826b-a69356f49a65\") " pod="swift-kuttl-tests/barbican-cbfe-account-create-update-dzdjr" Jan 26 10:59:50 crc kubenswrapper[5003]: I0126 10:59:50.256876 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ac2063ed-42ef-4188-826b-a69356f49a65-operator-scripts\") pod \"barbican-cbfe-account-create-update-dzdjr\" (UID: \"ac2063ed-42ef-4188-826b-a69356f49a65\") " pod="swift-kuttl-tests/barbican-cbfe-account-create-update-dzdjr" Jan 26 10:59:50 crc kubenswrapper[5003]: I0126 10:59:50.257010 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a7e67ef6-3327-4473-a44e-3c65226ce5db-operator-scripts\") pod \"barbican-db-create-9cxv7\" (UID: \"a7e67ef6-3327-4473-a44e-3c65226ce5db\") " pod="swift-kuttl-tests/barbican-db-create-9cxv7" Jan 26 10:59:50 crc kubenswrapper[5003]: I0126 10:59:50.282055 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvlth\" (UniqueName: \"kubernetes.io/projected/a7e67ef6-3327-4473-a44e-3c65226ce5db-kube-api-access-xvlth\") pod \"barbican-db-create-9cxv7\" (UID: \"a7e67ef6-3327-4473-a44e-3c65226ce5db\") " pod="swift-kuttl-tests/barbican-db-create-9cxv7" Jan 26 10:59:50 crc kubenswrapper[5003]: I0126 10:59:50.312398 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ncb26\" (UniqueName: \"kubernetes.io/projected/ac2063ed-42ef-4188-826b-a69356f49a65-kube-api-access-ncb26\") pod \"barbican-cbfe-account-create-update-dzdjr\" (UID: \"ac2063ed-42ef-4188-826b-a69356f49a65\") " pod="swift-kuttl-tests/barbican-cbfe-account-create-update-dzdjr" Jan 26 10:59:50 crc kubenswrapper[5003]: I0126 10:59:50.389464 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-db-create-9cxv7" Jan 26 10:59:50 crc kubenswrapper[5003]: I0126 10:59:50.396175 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/barbican-cbfe-account-create-update-dzdjr" Jan 26 10:59:50 crc kubenswrapper[5003]: I0126 10:59:50.823829 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-cbfe-account-create-update-dzdjr"] Jan 26 10:59:50 crc kubenswrapper[5003]: W0126 10:59:50.833473 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podac2063ed_42ef_4188_826b_a69356f49a65.slice/crio-dae1c3fa5bea031fd5f620cc8769d6ce7d3e3a3cf88bf1b907749801fc3ed471 WatchSource:0}: Error finding container dae1c3fa5bea031fd5f620cc8769d6ce7d3e3a3cf88bf1b907749801fc3ed471: Status 404 returned error can't find the container with id dae1c3fa5bea031fd5f620cc8769d6ce7d3e3a3cf88bf1b907749801fc3ed471 Jan 26 10:59:50 crc kubenswrapper[5003]: I0126 10:59:50.929138 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-db-create-9cxv7"] Jan 26 10:59:50 crc kubenswrapper[5003]: W0126 10:59:50.935327 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda7e67ef6_3327_4473_a44e_3c65226ce5db.slice/crio-648e3f876cb658026cad053f2a6f9b73d5b9415a09c874a465ae4f84b43d061d WatchSource:0}: Error finding container 648e3f876cb658026cad053f2a6f9b73d5b9415a09c874a465ae4f84b43d061d: Status 404 returned error can't find the container with id 648e3f876cb658026cad053f2a6f9b73d5b9415a09c874a465ae4f84b43d061d Jan 26 10:59:51 crc kubenswrapper[5003]: I0126 10:59:51.555770 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-db-create-9cxv7" event={"ID":"a7e67ef6-3327-4473-a44e-3c65226ce5db","Type":"ContainerStarted","Data":"648e3f876cb658026cad053f2a6f9b73d5b9415a09c874a465ae4f84b43d061d"} Jan 26 10:59:51 crc kubenswrapper[5003]: I0126 10:59:51.556757 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-cbfe-account-create-update-dzdjr" event={"ID":"ac2063ed-42ef-4188-826b-a69356f49a65","Type":"ContainerStarted","Data":"dae1c3fa5bea031fd5f620cc8769d6ce7d3e3a3cf88bf1b907749801fc3ed471"} Jan 26 10:59:52 crc kubenswrapper[5003]: I0126 10:59:52.565810 5003 generic.go:334] "Generic (PLEG): container finished" podID="a7e67ef6-3327-4473-a44e-3c65226ce5db" containerID="041eae90a52c3bb8cf61e357cb7cfc9eae16a6d670ec563ffd721323a623a543" exitCode=0 Jan 26 10:59:52 crc kubenswrapper[5003]: I0126 10:59:52.565901 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-db-create-9cxv7" event={"ID":"a7e67ef6-3327-4473-a44e-3c65226ce5db","Type":"ContainerDied","Data":"041eae90a52c3bb8cf61e357cb7cfc9eae16a6d670ec563ffd721323a623a543"} Jan 26 10:59:52 crc kubenswrapper[5003]: I0126 10:59:52.571615 5003 generic.go:334] "Generic (PLEG): container finished" podID="ac2063ed-42ef-4188-826b-a69356f49a65" containerID="254dae06a9484a8ea1e2c3c9ba2c50aae867cc9ba39ce56dedddbb9aea4102c5" exitCode=0 Jan 26 10:59:52 crc kubenswrapper[5003]: I0126 10:59:52.571677 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-cbfe-account-create-update-dzdjr" event={"ID":"ac2063ed-42ef-4188-826b-a69356f49a65","Type":"ContainerDied","Data":"254dae06a9484a8ea1e2c3c9ba2c50aae867cc9ba39ce56dedddbb9aea4102c5"} Jan 26 10:59:52 crc kubenswrapper[5003]: I0126 10:59:52.894401 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-index-wnxt9"] Jan 26 10:59:52 crc kubenswrapper[5003]: 
I0126 10:59:52.895194 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-index-wnxt9" Jan 26 10:59:52 crc kubenswrapper[5003]: I0126 10:59:52.899527 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-index-dockercfg-xxmcf" Jan 26 10:59:52 crc kubenswrapper[5003]: I0126 10:59:52.901666 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-index-wnxt9"] Jan 26 10:59:52 crc kubenswrapper[5003]: I0126 10:59:52.997193 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-545cg\" (UniqueName: \"kubernetes.io/projected/15e8c34d-3505-4448-9804-47072d555137-kube-api-access-545cg\") pod \"swift-operator-index-wnxt9\" (UID: \"15e8c34d-3505-4448-9804-47072d555137\") " pod="openstack-operators/swift-operator-index-wnxt9" Jan 26 10:59:53 crc kubenswrapper[5003]: I0126 10:59:53.099075 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-545cg\" (UniqueName: \"kubernetes.io/projected/15e8c34d-3505-4448-9804-47072d555137-kube-api-access-545cg\") pod \"swift-operator-index-wnxt9\" (UID: \"15e8c34d-3505-4448-9804-47072d555137\") " pod="openstack-operators/swift-operator-index-wnxt9" Jan 26 10:59:53 crc kubenswrapper[5003]: I0126 10:59:53.122422 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-545cg\" (UniqueName: \"kubernetes.io/projected/15e8c34d-3505-4448-9804-47072d555137-kube-api-access-545cg\") pod \"swift-operator-index-wnxt9\" (UID: \"15e8c34d-3505-4448-9804-47072d555137\") " pod="openstack-operators/swift-operator-index-wnxt9" Jan 26 10:59:53 crc kubenswrapper[5003]: I0126 10:59:53.213321 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-index-wnxt9" Jan 26 10:59:54 crc kubenswrapper[5003]: I0126 10:59:54.414990 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-index-wnxt9"] Jan 26 10:59:54 crc kubenswrapper[5003]: W0126 10:59:54.426140 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod15e8c34d_3505_4448_9804_47072d555137.slice/crio-de652510e354db9dd5beaa0837b3bec6cc2f8e9bf1ef2b81e0365756226b3c13 WatchSource:0}: Error finding container de652510e354db9dd5beaa0837b3bec6cc2f8e9bf1ef2b81e0365756226b3c13: Status 404 returned error can't find the container with id de652510e354db9dd5beaa0837b3bec6cc2f8e9bf1ef2b81e0365756226b3c13 Jan 26 10:59:54 crc kubenswrapper[5003]: I0126 10:59:54.582757 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-index-wnxt9" event={"ID":"15e8c34d-3505-4448-9804-47072d555137","Type":"ContainerStarted","Data":"de652510e354db9dd5beaa0837b3bec6cc2f8e9bf1ef2b81e0365756226b3c13"} Jan 26 10:59:54 crc kubenswrapper[5003]: I0126 10:59:54.633946 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-cbfe-account-create-update-dzdjr" Jan 26 10:59:54 crc kubenswrapper[5003]: I0126 10:59:54.696103 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/barbican-db-create-9cxv7" Jan 26 10:59:54 crc kubenswrapper[5003]: I0126 10:59:54.737408 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ac2063ed-42ef-4188-826b-a69356f49a65-operator-scripts\") pod \"ac2063ed-42ef-4188-826b-a69356f49a65\" (UID: \"ac2063ed-42ef-4188-826b-a69356f49a65\") " Jan 26 10:59:54 crc kubenswrapper[5003]: I0126 10:59:54.737492 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ncb26\" (UniqueName: \"kubernetes.io/projected/ac2063ed-42ef-4188-826b-a69356f49a65-kube-api-access-ncb26\") pod \"ac2063ed-42ef-4188-826b-a69356f49a65\" (UID: \"ac2063ed-42ef-4188-826b-a69356f49a65\") " Jan 26 10:59:54 crc kubenswrapper[5003]: I0126 10:59:54.738458 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac2063ed-42ef-4188-826b-a69356f49a65-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ac2063ed-42ef-4188-826b-a69356f49a65" (UID: "ac2063ed-42ef-4188-826b-a69356f49a65"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:59:54 crc kubenswrapper[5003]: I0126 10:59:54.742387 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac2063ed-42ef-4188-826b-a69356f49a65-kube-api-access-ncb26" (OuterVolumeSpecName: "kube-api-access-ncb26") pod "ac2063ed-42ef-4188-826b-a69356f49a65" (UID: "ac2063ed-42ef-4188-826b-a69356f49a65"). InnerVolumeSpecName "kube-api-access-ncb26". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:59:54 crc kubenswrapper[5003]: I0126 10:59:54.839309 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xvlth\" (UniqueName: \"kubernetes.io/projected/a7e67ef6-3327-4473-a44e-3c65226ce5db-kube-api-access-xvlth\") pod \"a7e67ef6-3327-4473-a44e-3c65226ce5db\" (UID: \"a7e67ef6-3327-4473-a44e-3c65226ce5db\") " Jan 26 10:59:54 crc kubenswrapper[5003]: I0126 10:59:54.839471 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a7e67ef6-3327-4473-a44e-3c65226ce5db-operator-scripts\") pod \"a7e67ef6-3327-4473-a44e-3c65226ce5db\" (UID: \"a7e67ef6-3327-4473-a44e-3c65226ce5db\") " Jan 26 10:59:54 crc kubenswrapper[5003]: I0126 10:59:54.839883 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ac2063ed-42ef-4188-826b-a69356f49a65-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 26 10:59:54 crc kubenswrapper[5003]: I0126 10:59:54.839905 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ncb26\" (UniqueName: \"kubernetes.io/projected/ac2063ed-42ef-4188-826b-a69356f49a65-kube-api-access-ncb26\") on node \"crc\" DevicePath \"\"" Jan 26 10:59:54 crc kubenswrapper[5003]: I0126 10:59:54.840149 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a7e67ef6-3327-4473-a44e-3c65226ce5db-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a7e67ef6-3327-4473-a44e-3c65226ce5db" (UID: "a7e67ef6-3327-4473-a44e-3c65226ce5db"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 10:59:54 crc kubenswrapper[5003]: I0126 10:59:54.842026 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7e67ef6-3327-4473-a44e-3c65226ce5db-kube-api-access-xvlth" (OuterVolumeSpecName: "kube-api-access-xvlth") pod "a7e67ef6-3327-4473-a44e-3c65226ce5db" (UID: "a7e67ef6-3327-4473-a44e-3c65226ce5db"). InnerVolumeSpecName "kube-api-access-xvlth". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:59:54 crc kubenswrapper[5003]: I0126 10:59:54.941462 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xvlth\" (UniqueName: \"kubernetes.io/projected/a7e67ef6-3327-4473-a44e-3c65226ce5db-kube-api-access-xvlth\") on node \"crc\" DevicePath \"\"" Jan 26 10:59:54 crc kubenswrapper[5003]: I0126 10:59:54.941578 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a7e67ef6-3327-4473-a44e-3c65226ce5db-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 26 10:59:55 crc kubenswrapper[5003]: I0126 10:59:55.590669 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-db-create-9cxv7" Jan 26 10:59:55 crc kubenswrapper[5003]: I0126 10:59:55.590666 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-db-create-9cxv7" event={"ID":"a7e67ef6-3327-4473-a44e-3c65226ce5db","Type":"ContainerDied","Data":"648e3f876cb658026cad053f2a6f9b73d5b9415a09c874a465ae4f84b43d061d"} Jan 26 10:59:55 crc kubenswrapper[5003]: I0126 10:59:55.590781 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="648e3f876cb658026cad053f2a6f9b73d5b9415a09c874a465ae4f84b43d061d" Jan 26 10:59:55 crc kubenswrapper[5003]: I0126 10:59:55.591840 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/barbican-cbfe-account-create-update-dzdjr" Jan 26 10:59:55 crc kubenswrapper[5003]: I0126 10:59:55.591818 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-cbfe-account-create-update-dzdjr" event={"ID":"ac2063ed-42ef-4188-826b-a69356f49a65","Type":"ContainerDied","Data":"dae1c3fa5bea031fd5f620cc8769d6ce7d3e3a3cf88bf1b907749801fc3ed471"} Jan 26 10:59:55 crc kubenswrapper[5003]: I0126 10:59:55.591968 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dae1c3fa5bea031fd5f620cc8769d6ce7d3e3a3cf88bf1b907749801fc3ed471" Jan 26 10:59:56 crc kubenswrapper[5003]: I0126 10:59:56.599214 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-index-wnxt9" event={"ID":"15e8c34d-3505-4448-9804-47072d555137","Type":"ContainerStarted","Data":"a38dbca9e53d402fb7607fca71e90c52bcaa256a5e3637c560d1c0d4ff371378"} Jan 26 10:59:56 crc kubenswrapper[5003]: I0126 10:59:56.620609 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-index-wnxt9" podStartSLOduration=2.722390742 podStartE2EDuration="4.62059343s" podCreationTimestamp="2026-01-26 10:59:52 +0000 UTC" firstStartedPulling="2026-01-26 10:59:54.428332947 +0000 UTC m=+1009.969558508" lastFinishedPulling="2026-01-26 10:59:56.326535635 +0000 UTC m=+1011.867761196" observedRunningTime="2026-01-26 10:59:56.617999956 +0000 UTC m=+1012.159225517" watchObservedRunningTime="2026-01-26 10:59:56.62059343 +0000 UTC m=+1012.161818991" Jan 26 10:59:57 crc kubenswrapper[5003]: I0126 10:59:57.481199 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/swift-operator-index-wnxt9"] Jan 26 10:59:58 crc kubenswrapper[5003]: I0126 10:59:58.094714 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-index-wp2gn"] Jan 26 10:59:58 crc kubenswrapper[5003]: E0126 10:59:58.095107 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7e67ef6-3327-4473-a44e-3c65226ce5db" containerName="mariadb-database-create" Jan 26 10:59:58 crc kubenswrapper[5003]: I0126 10:59:58.095129 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7e67ef6-3327-4473-a44e-3c65226ce5db" containerName="mariadb-database-create" Jan 26 10:59:58 crc kubenswrapper[5003]: E0126 10:59:58.095152 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac2063ed-42ef-4188-826b-a69356f49a65" containerName="mariadb-account-create-update" Jan 26 10:59:58 crc kubenswrapper[5003]: I0126 10:59:58.095163 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac2063ed-42ef-4188-826b-a69356f49a65" containerName="mariadb-account-create-update" Jan 26 10:59:58 crc kubenswrapper[5003]: I0126 10:59:58.095375 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7e67ef6-3327-4473-a44e-3c65226ce5db" containerName="mariadb-database-create" Jan 26 10:59:58 crc kubenswrapper[5003]: I0126 10:59:58.095394 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac2063ed-42ef-4188-826b-a69356f49a65" containerName="mariadb-account-create-update" Jan 26 10:59:58 crc kubenswrapper[5003]: I0126 10:59:58.096054 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-index-wp2gn" Jan 26 10:59:58 crc kubenswrapper[5003]: I0126 10:59:58.108199 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-index-wp2gn"] Jan 26 10:59:58 crc kubenswrapper[5003]: I0126 10:59:58.205177 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkknw\" (UniqueName: \"kubernetes.io/projected/f6b37c7e-571e-4047-99a1-149f0956e9a8-kube-api-access-mkknw\") pod \"swift-operator-index-wp2gn\" (UID: \"f6b37c7e-571e-4047-99a1-149f0956e9a8\") " pod="openstack-operators/swift-operator-index-wp2gn" Jan 26 10:59:58 crc kubenswrapper[5003]: I0126 10:59:58.307233 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkknw\" (UniqueName: \"kubernetes.io/projected/f6b37c7e-571e-4047-99a1-149f0956e9a8-kube-api-access-mkknw\") pod \"swift-operator-index-wp2gn\" (UID: \"f6b37c7e-571e-4047-99a1-149f0956e9a8\") " pod="openstack-operators/swift-operator-index-wp2gn" Jan 26 10:59:58 crc kubenswrapper[5003]: I0126 10:59:58.326066 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkknw\" (UniqueName: \"kubernetes.io/projected/f6b37c7e-571e-4047-99a1-149f0956e9a8-kube-api-access-mkknw\") pod \"swift-operator-index-wp2gn\" (UID: \"f6b37c7e-571e-4047-99a1-149f0956e9a8\") " pod="openstack-operators/swift-operator-index-wp2gn" Jan 26 10:59:58 crc kubenswrapper[5003]: I0126 10:59:58.412934 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-index-wp2gn" Jan 26 10:59:58 crc kubenswrapper[5003]: I0126 10:59:58.612146 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/swift-operator-index-wnxt9" podUID="15e8c34d-3505-4448-9804-47072d555137" containerName="registry-server" containerID="cri-o://a38dbca9e53d402fb7607fca71e90c52bcaa256a5e3637c560d1c0d4ff371378" gracePeriod=2 Jan 26 10:59:58 crc kubenswrapper[5003]: I0126 10:59:58.822843 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-index-wp2gn"] Jan 26 10:59:58 crc kubenswrapper[5003]: W0126 10:59:58.851902 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf6b37c7e_571e_4047_99a1_149f0956e9a8.slice/crio-59604d9a33760f03a7e7fb3f5f6f9687725dd9f36e39a908488c8db47376c3b2 WatchSource:0}: Error finding container 59604d9a33760f03a7e7fb3f5f6f9687725dd9f36e39a908488c8db47376c3b2: Status 404 returned error can't find the container with id 59604d9a33760f03a7e7fb3f5f6f9687725dd9f36e39a908488c8db47376c3b2 Jan 26 10:59:58 crc kubenswrapper[5003]: I0126 10:59:58.981877 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-index-wnxt9" Jan 26 10:59:59 crc kubenswrapper[5003]: I0126 10:59:59.119082 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-545cg\" (UniqueName: \"kubernetes.io/projected/15e8c34d-3505-4448-9804-47072d555137-kube-api-access-545cg\") pod \"15e8c34d-3505-4448-9804-47072d555137\" (UID: \"15e8c34d-3505-4448-9804-47072d555137\") " Jan 26 10:59:59 crc kubenswrapper[5003]: I0126 10:59:59.124260 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15e8c34d-3505-4448-9804-47072d555137-kube-api-access-545cg" (OuterVolumeSpecName: "kube-api-access-545cg") pod "15e8c34d-3505-4448-9804-47072d555137" (UID: "15e8c34d-3505-4448-9804-47072d555137"). InnerVolumeSpecName "kube-api-access-545cg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 10:59:59 crc kubenswrapper[5003]: I0126 10:59:59.221483 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-545cg\" (UniqueName: \"kubernetes.io/projected/15e8c34d-3505-4448-9804-47072d555137-kube-api-access-545cg\") on node \"crc\" DevicePath \"\"" Jan 26 10:59:59 crc kubenswrapper[5003]: I0126 10:59:59.314417 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/keystone-f9445869f-dzj8v" Jan 26 10:59:59 crc kubenswrapper[5003]: I0126 10:59:59.623697 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-index-wp2gn" event={"ID":"f6b37c7e-571e-4047-99a1-149f0956e9a8","Type":"ContainerStarted","Data":"c64c595414b582e16930d45787a4f77d232dc81218d9da2e1ecbce3093cca17d"} Jan 26 10:59:59 crc kubenswrapper[5003]: I0126 10:59:59.624041 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-index-wp2gn" event={"ID":"f6b37c7e-571e-4047-99a1-149f0956e9a8","Type":"ContainerStarted","Data":"59604d9a33760f03a7e7fb3f5f6f9687725dd9f36e39a908488c8db47376c3b2"} Jan 26 10:59:59 crc kubenswrapper[5003]: I0126 10:59:59.627483 5003 generic.go:334] "Generic (PLEG): container finished" podID="15e8c34d-3505-4448-9804-47072d555137" containerID="a38dbca9e53d402fb7607fca71e90c52bcaa256a5e3637c560d1c0d4ff371378" exitCode=0 Jan 26 10:59:59 crc kubenswrapper[5003]: I0126 10:59:59.627530 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-index-wnxt9" Jan 26 10:59:59 crc kubenswrapper[5003]: I0126 10:59:59.627521 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-index-wnxt9" event={"ID":"15e8c34d-3505-4448-9804-47072d555137","Type":"ContainerDied","Data":"a38dbca9e53d402fb7607fca71e90c52bcaa256a5e3637c560d1c0d4ff371378"} Jan 26 10:59:59 crc kubenswrapper[5003]: I0126 10:59:59.627599 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-index-wnxt9" event={"ID":"15e8c34d-3505-4448-9804-47072d555137","Type":"ContainerDied","Data":"de652510e354db9dd5beaa0837b3bec6cc2f8e9bf1ef2b81e0365756226b3c13"} Jan 26 10:59:59 crc kubenswrapper[5003]: I0126 10:59:59.627632 5003 scope.go:117] "RemoveContainer" containerID="a38dbca9e53d402fb7607fca71e90c52bcaa256a5e3637c560d1c0d4ff371378" Jan 26 10:59:59 crc kubenswrapper[5003]: I0126 10:59:59.650910 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-index-wp2gn" podStartSLOduration=1.594413181 podStartE2EDuration="1.650884281s" podCreationTimestamp="2026-01-26 10:59:58 +0000 UTC" firstStartedPulling="2026-01-26 10:59:58.855114876 +0000 UTC m=+1014.396340437" lastFinishedPulling="2026-01-26 10:59:58.911585976 +0000 UTC m=+1014.452811537" observedRunningTime="2026-01-26 10:59:59.645007112 +0000 UTC m=+1015.186232693" watchObservedRunningTime="2026-01-26 10:59:59.650884281 +0000 UTC m=+1015.192109852" Jan 26 10:59:59 crc kubenswrapper[5003]: I0126 10:59:59.661245 5003 scope.go:117] "RemoveContainer" containerID="a38dbca9e53d402fb7607fca71e90c52bcaa256a5e3637c560d1c0d4ff371378" Jan 26 10:59:59 crc kubenswrapper[5003]: E0126 10:59:59.661939 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a38dbca9e53d402fb7607fca71e90c52bcaa256a5e3637c560d1c0d4ff371378\": container with ID starting with a38dbca9e53d402fb7607fca71e90c52bcaa256a5e3637c560d1c0d4ff371378 not found: ID does not exist" containerID="a38dbca9e53d402fb7607fca71e90c52bcaa256a5e3637c560d1c0d4ff371378" Jan 26 10:59:59 crc kubenswrapper[5003]: I0126 10:59:59.661989 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a38dbca9e53d402fb7607fca71e90c52bcaa256a5e3637c560d1c0d4ff371378"} err="failed to get container status \"a38dbca9e53d402fb7607fca71e90c52bcaa256a5e3637c560d1c0d4ff371378\": rpc error: code = NotFound desc = could not find container \"a38dbca9e53d402fb7607fca71e90c52bcaa256a5e3637c560d1c0d4ff371378\": container with ID starting with a38dbca9e53d402fb7607fca71e90c52bcaa256a5e3637c560d1c0d4ff371378 not found: ID does not exist" Jan 26 10:59:59 crc kubenswrapper[5003]: I0126 10:59:59.670095 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/swift-operator-index-wnxt9"] Jan 26 10:59:59 crc kubenswrapper[5003]: I0126 10:59:59.675969 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/swift-operator-index-wnxt9"] Jan 26 11:00:00 crc kubenswrapper[5003]: I0126 11:00:00.186376 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29490420-btqwj"] Jan 26 11:00:00 crc kubenswrapper[5003]: E0126 11:00:00.187204 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15e8c34d-3505-4448-9804-47072d555137" containerName="registry-server" Jan 26 11:00:00 crc kubenswrapper[5003]: I0126 
11:00:00.187226 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="15e8c34d-3505-4448-9804-47072d555137" containerName="registry-server" Jan 26 11:00:00 crc kubenswrapper[5003]: I0126 11:00:00.187631 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="15e8c34d-3505-4448-9804-47072d555137" containerName="registry-server" Jan 26 11:00:00 crc kubenswrapper[5003]: I0126 11:00:00.188500 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29490420-btqwj" Jan 26 11:00:00 crc kubenswrapper[5003]: I0126 11:00:00.197088 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 26 11:00:00 crc kubenswrapper[5003]: I0126 11:00:00.197580 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 26 11:00:00 crc kubenswrapper[5003]: I0126 11:00:00.216227 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29490420-btqwj"] Jan 26 11:00:00 crc kubenswrapper[5003]: I0126 11:00:00.340383 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tnzs8\" (UniqueName: \"kubernetes.io/projected/980e8360-14d1-410c-b84d-c37d38f0ab56-kube-api-access-tnzs8\") pod \"collect-profiles-29490420-btqwj\" (UID: \"980e8360-14d1-410c-b84d-c37d38f0ab56\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490420-btqwj" Jan 26 11:00:00 crc kubenswrapper[5003]: I0126 11:00:00.340500 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/980e8360-14d1-410c-b84d-c37d38f0ab56-secret-volume\") pod \"collect-profiles-29490420-btqwj\" (UID: \"980e8360-14d1-410c-b84d-c37d38f0ab56\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490420-btqwj" Jan 26 11:00:00 crc kubenswrapper[5003]: I0126 11:00:00.340583 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/980e8360-14d1-410c-b84d-c37d38f0ab56-config-volume\") pod \"collect-profiles-29490420-btqwj\" (UID: \"980e8360-14d1-410c-b84d-c37d38f0ab56\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490420-btqwj" Jan 26 11:00:00 crc kubenswrapper[5003]: I0126 11:00:00.354575 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/barbican-db-sync-dhfpd"] Jan 26 11:00:00 crc kubenswrapper[5003]: I0126 11:00:00.355374 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/barbican-db-sync-dhfpd" Jan 26 11:00:00 crc kubenswrapper[5003]: I0126 11:00:00.357045 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"barbican-config-data" Jan 26 11:00:00 crc kubenswrapper[5003]: I0126 11:00:00.363203 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-db-sync-dhfpd"] Jan 26 11:00:00 crc kubenswrapper[5003]: I0126 11:00:00.364253 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"barbican-barbican-dockercfg-t8fjf" Jan 26 11:00:00 crc kubenswrapper[5003]: I0126 11:00:00.441685 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tnzs8\" (UniqueName: \"kubernetes.io/projected/980e8360-14d1-410c-b84d-c37d38f0ab56-kube-api-access-tnzs8\") pod \"collect-profiles-29490420-btqwj\" (UID: \"980e8360-14d1-410c-b84d-c37d38f0ab56\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490420-btqwj" Jan 26 11:00:00 crc kubenswrapper[5003]: I0126 11:00:00.441737 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/39e8f294-ba0f-4524-914c-501145d935b5-db-sync-config-data\") pod \"barbican-db-sync-dhfpd\" (UID: \"39e8f294-ba0f-4524-914c-501145d935b5\") " pod="swift-kuttl-tests/barbican-db-sync-dhfpd" Jan 26 11:00:00 crc kubenswrapper[5003]: I0126 11:00:00.441768 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/980e8360-14d1-410c-b84d-c37d38f0ab56-secret-volume\") pod \"collect-profiles-29490420-btqwj\" (UID: \"980e8360-14d1-410c-b84d-c37d38f0ab56\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490420-btqwj" Jan 26 11:00:00 crc kubenswrapper[5003]: I0126 11:00:00.441803 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sb9dr\" (UniqueName: \"kubernetes.io/projected/39e8f294-ba0f-4524-914c-501145d935b5-kube-api-access-sb9dr\") pod \"barbican-db-sync-dhfpd\" (UID: \"39e8f294-ba0f-4524-914c-501145d935b5\") " pod="swift-kuttl-tests/barbican-db-sync-dhfpd" Jan 26 11:00:00 crc kubenswrapper[5003]: I0126 11:00:00.441839 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/980e8360-14d1-410c-b84d-c37d38f0ab56-config-volume\") pod \"collect-profiles-29490420-btqwj\" (UID: \"980e8360-14d1-410c-b84d-c37d38f0ab56\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490420-btqwj" Jan 26 11:00:00 crc kubenswrapper[5003]: I0126 11:00:00.442659 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/980e8360-14d1-410c-b84d-c37d38f0ab56-config-volume\") pod \"collect-profiles-29490420-btqwj\" (UID: \"980e8360-14d1-410c-b84d-c37d38f0ab56\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490420-btqwj" Jan 26 11:00:00 crc kubenswrapper[5003]: I0126 11:00:00.446714 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/980e8360-14d1-410c-b84d-c37d38f0ab56-secret-volume\") pod \"collect-profiles-29490420-btqwj\" (UID: \"980e8360-14d1-410c-b84d-c37d38f0ab56\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490420-btqwj" Jan 26 11:00:00 crc 
kubenswrapper[5003]: I0126 11:00:00.463363 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tnzs8\" (UniqueName: \"kubernetes.io/projected/980e8360-14d1-410c-b84d-c37d38f0ab56-kube-api-access-tnzs8\") pod \"collect-profiles-29490420-btqwj\" (UID: \"980e8360-14d1-410c-b84d-c37d38f0ab56\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490420-btqwj" Jan 26 11:00:00 crc kubenswrapper[5003]: I0126 11:00:00.525591 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29490420-btqwj" Jan 26 11:00:00 crc kubenswrapper[5003]: I0126 11:00:00.543745 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/39e8f294-ba0f-4524-914c-501145d935b5-db-sync-config-data\") pod \"barbican-db-sync-dhfpd\" (UID: \"39e8f294-ba0f-4524-914c-501145d935b5\") " pod="swift-kuttl-tests/barbican-db-sync-dhfpd" Jan 26 11:00:00 crc kubenswrapper[5003]: I0126 11:00:00.543814 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sb9dr\" (UniqueName: \"kubernetes.io/projected/39e8f294-ba0f-4524-914c-501145d935b5-kube-api-access-sb9dr\") pod \"barbican-db-sync-dhfpd\" (UID: \"39e8f294-ba0f-4524-914c-501145d935b5\") " pod="swift-kuttl-tests/barbican-db-sync-dhfpd" Jan 26 11:00:00 crc kubenswrapper[5003]: I0126 11:00:00.548688 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/39e8f294-ba0f-4524-914c-501145d935b5-db-sync-config-data\") pod \"barbican-db-sync-dhfpd\" (UID: \"39e8f294-ba0f-4524-914c-501145d935b5\") " pod="swift-kuttl-tests/barbican-db-sync-dhfpd" Jan 26 11:00:00 crc kubenswrapper[5003]: I0126 11:00:00.579549 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sb9dr\" (UniqueName: \"kubernetes.io/projected/39e8f294-ba0f-4524-914c-501145d935b5-kube-api-access-sb9dr\") pod \"barbican-db-sync-dhfpd\" (UID: \"39e8f294-ba0f-4524-914c-501145d935b5\") " pod="swift-kuttl-tests/barbican-db-sync-dhfpd" Jan 26 11:00:00 crc kubenswrapper[5003]: I0126 11:00:00.674198 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/barbican-db-sync-dhfpd" Jan 26 11:00:00 crc kubenswrapper[5003]: I0126 11:00:00.953480 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29490420-btqwj"] Jan 26 11:00:00 crc kubenswrapper[5003]: W0126 11:00:00.963016 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod980e8360_14d1_410c_b84d_c37d38f0ab56.slice/crio-319f1f633f6f115313ff0ca5d9ae3be3aa70f50da6cdb1a18918edaf3883ab17 WatchSource:0}: Error finding container 319f1f633f6f115313ff0ca5d9ae3be3aa70f50da6cdb1a18918edaf3883ab17: Status 404 returned error can't find the container with id 319f1f633f6f115313ff0ca5d9ae3be3aa70f50da6cdb1a18918edaf3883ab17 Jan 26 11:00:01 crc kubenswrapper[5003]: I0126 11:00:01.014171 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15e8c34d-3505-4448-9804-47072d555137" path="/var/lib/kubelet/pods/15e8c34d-3505-4448-9804-47072d555137/volumes" Jan 26 11:00:01 crc kubenswrapper[5003]: I0126 11:00:01.082657 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-db-sync-dhfpd"] Jan 26 11:00:01 crc kubenswrapper[5003]: W0126 11:00:01.089851 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod39e8f294_ba0f_4524_914c_501145d935b5.slice/crio-6f93706a0dbf2c70a7d984120cd2dcdf59b5ae74e26c517122f67fc755b76820 WatchSource:0}: Error finding container 6f93706a0dbf2c70a7d984120cd2dcdf59b5ae74e26c517122f67fc755b76820: Status 404 returned error can't find the container with id 6f93706a0dbf2c70a7d984120cd2dcdf59b5ae74e26c517122f67fc755b76820 Jan 26 11:00:01 crc kubenswrapper[5003]: I0126 11:00:01.647790 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-db-sync-dhfpd" event={"ID":"39e8f294-ba0f-4524-914c-501145d935b5","Type":"ContainerStarted","Data":"6f93706a0dbf2c70a7d984120cd2dcdf59b5ae74e26c517122f67fc755b76820"} Jan 26 11:00:01 crc kubenswrapper[5003]: I0126 11:00:01.649442 5003 generic.go:334] "Generic (PLEG): container finished" podID="980e8360-14d1-410c-b84d-c37d38f0ab56" containerID="d89553324ed58cb2202b36d9526951385bc286190fadd37443146460aa2a38e9" exitCode=0 Jan 26 11:00:01 crc kubenswrapper[5003]: I0126 11:00:01.649475 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29490420-btqwj" event={"ID":"980e8360-14d1-410c-b84d-c37d38f0ab56","Type":"ContainerDied","Data":"d89553324ed58cb2202b36d9526951385bc286190fadd37443146460aa2a38e9"} Jan 26 11:00:01 crc kubenswrapper[5003]: I0126 11:00:01.649495 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29490420-btqwj" event={"ID":"980e8360-14d1-410c-b84d-c37d38f0ab56","Type":"ContainerStarted","Data":"319f1f633f6f115313ff0ca5d9ae3be3aa70f50da6cdb1a18918edaf3883ab17"} Jan 26 11:00:04 crc kubenswrapper[5003]: I0126 11:00:04.904375 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29490420-btqwj" Jan 26 11:00:05 crc kubenswrapper[5003]: I0126 11:00:05.019279 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/980e8360-14d1-410c-b84d-c37d38f0ab56-config-volume\") pod \"980e8360-14d1-410c-b84d-c37d38f0ab56\" (UID: \"980e8360-14d1-410c-b84d-c37d38f0ab56\") " Jan 26 11:00:05 crc kubenswrapper[5003]: I0126 11:00:05.019401 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tnzs8\" (UniqueName: \"kubernetes.io/projected/980e8360-14d1-410c-b84d-c37d38f0ab56-kube-api-access-tnzs8\") pod \"980e8360-14d1-410c-b84d-c37d38f0ab56\" (UID: \"980e8360-14d1-410c-b84d-c37d38f0ab56\") " Jan 26 11:00:05 crc kubenswrapper[5003]: I0126 11:00:05.019480 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/980e8360-14d1-410c-b84d-c37d38f0ab56-secret-volume\") pod \"980e8360-14d1-410c-b84d-c37d38f0ab56\" (UID: \"980e8360-14d1-410c-b84d-c37d38f0ab56\") " Jan 26 11:00:05 crc kubenswrapper[5003]: I0126 11:00:05.020689 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/980e8360-14d1-410c-b84d-c37d38f0ab56-config-volume" (OuterVolumeSpecName: "config-volume") pod "980e8360-14d1-410c-b84d-c37d38f0ab56" (UID: "980e8360-14d1-410c-b84d-c37d38f0ab56"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 11:00:05 crc kubenswrapper[5003]: I0126 11:00:05.039844 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/980e8360-14d1-410c-b84d-c37d38f0ab56-kube-api-access-tnzs8" (OuterVolumeSpecName: "kube-api-access-tnzs8") pod "980e8360-14d1-410c-b84d-c37d38f0ab56" (UID: "980e8360-14d1-410c-b84d-c37d38f0ab56"). InnerVolumeSpecName "kube-api-access-tnzs8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:00:05 crc kubenswrapper[5003]: I0126 11:00:05.040273 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/980e8360-14d1-410c-b84d-c37d38f0ab56-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "980e8360-14d1-410c-b84d-c37d38f0ab56" (UID: "980e8360-14d1-410c-b84d-c37d38f0ab56"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:00:05 crc kubenswrapper[5003]: I0126 11:00:05.121112 5003 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/980e8360-14d1-410c-b84d-c37d38f0ab56-config-volume\") on node \"crc\" DevicePath \"\"" Jan 26 11:00:05 crc kubenswrapper[5003]: I0126 11:00:05.121151 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tnzs8\" (UniqueName: \"kubernetes.io/projected/980e8360-14d1-410c-b84d-c37d38f0ab56-kube-api-access-tnzs8\") on node \"crc\" DevicePath \"\"" Jan 26 11:00:05 crc kubenswrapper[5003]: I0126 11:00:05.121164 5003 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/980e8360-14d1-410c-b84d-c37d38f0ab56-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 26 11:00:05 crc kubenswrapper[5003]: I0126 11:00:05.674390 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29490420-btqwj" Jan 26 11:00:05 crc kubenswrapper[5003]: I0126 11:00:05.674426 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29490420-btqwj" event={"ID":"980e8360-14d1-410c-b84d-c37d38f0ab56","Type":"ContainerDied","Data":"319f1f633f6f115313ff0ca5d9ae3be3aa70f50da6cdb1a18918edaf3883ab17"} Jan 26 11:00:05 crc kubenswrapper[5003]: I0126 11:00:05.674467 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="319f1f633f6f115313ff0ca5d9ae3be3aa70f50da6cdb1a18918edaf3883ab17" Jan 26 11:00:05 crc kubenswrapper[5003]: I0126 11:00:05.675987 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-db-sync-dhfpd" event={"ID":"39e8f294-ba0f-4524-914c-501145d935b5","Type":"ContainerStarted","Data":"d076e4e26807baf418aff0c3491c1722cbc26711fd90211216908a74466029d2"} Jan 26 11:00:05 crc kubenswrapper[5003]: I0126 11:00:05.691043 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/barbican-db-sync-dhfpd" podStartSLOduration=1.300334494 podStartE2EDuration="5.691022676s" podCreationTimestamp="2026-01-26 11:00:00 +0000 UTC" firstStartedPulling="2026-01-26 11:00:01.091646368 +0000 UTC m=+1016.632871929" lastFinishedPulling="2026-01-26 11:00:05.48233455 +0000 UTC m=+1021.023560111" observedRunningTime="2026-01-26 11:00:05.690197062 +0000 UTC m=+1021.231422623" watchObservedRunningTime="2026-01-26 11:00:05.691022676 +0000 UTC m=+1021.232248247" Jan 26 11:00:08 crc kubenswrapper[5003]: I0126 11:00:08.413179 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/swift-operator-index-wp2gn" Jan 26 11:00:08 crc kubenswrapper[5003]: I0126 11:00:08.413715 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-index-wp2gn" Jan 26 11:00:08 crc kubenswrapper[5003]: I0126 11:00:08.445045 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/swift-operator-index-wp2gn" Jan 26 11:00:08 crc kubenswrapper[5003]: I0126 11:00:08.697415 5003 generic.go:334] "Generic (PLEG): container finished" podID="39e8f294-ba0f-4524-914c-501145d935b5" containerID="d076e4e26807baf418aff0c3491c1722cbc26711fd90211216908a74466029d2" exitCode=0 Jan 26 11:00:08 crc kubenswrapper[5003]: I0126 11:00:08.697504 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-db-sync-dhfpd" event={"ID":"39e8f294-ba0f-4524-914c-501145d935b5","Type":"ContainerDied","Data":"d076e4e26807baf418aff0c3491c1722cbc26711fd90211216908a74466029d2"} Jan 26 11:00:08 crc kubenswrapper[5003]: I0126 11:00:08.734486 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-index-wp2gn" Jan 26 11:00:10 crc kubenswrapper[5003]: I0126 11:00:10.054235 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/barbican-db-sync-dhfpd" Jan 26 11:00:10 crc kubenswrapper[5003]: I0126 11:00:10.199030 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/39e8f294-ba0f-4524-914c-501145d935b5-db-sync-config-data\") pod \"39e8f294-ba0f-4524-914c-501145d935b5\" (UID: \"39e8f294-ba0f-4524-914c-501145d935b5\") " Jan 26 11:00:10 crc kubenswrapper[5003]: I0126 11:00:10.199182 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb9dr\" (UniqueName: \"kubernetes.io/projected/39e8f294-ba0f-4524-914c-501145d935b5-kube-api-access-sb9dr\") pod \"39e8f294-ba0f-4524-914c-501145d935b5\" (UID: \"39e8f294-ba0f-4524-914c-501145d935b5\") " Jan 26 11:00:10 crc kubenswrapper[5003]: I0126 11:00:10.205385 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39e8f294-ba0f-4524-914c-501145d935b5-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "39e8f294-ba0f-4524-914c-501145d935b5" (UID: "39e8f294-ba0f-4524-914c-501145d935b5"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:00:10 crc kubenswrapper[5003]: I0126 11:00:10.211132 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39e8f294-ba0f-4524-914c-501145d935b5-kube-api-access-sb9dr" (OuterVolumeSpecName: "kube-api-access-sb9dr") pod "39e8f294-ba0f-4524-914c-501145d935b5" (UID: "39e8f294-ba0f-4524-914c-501145d935b5"). InnerVolumeSpecName "kube-api-access-sb9dr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:00:10 crc kubenswrapper[5003]: I0126 11:00:10.300725 5003 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/39e8f294-ba0f-4524-914c-501145d935b5-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 26 11:00:10 crc kubenswrapper[5003]: I0126 11:00:10.300771 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb9dr\" (UniqueName: \"kubernetes.io/projected/39e8f294-ba0f-4524-914c-501145d935b5-kube-api-access-sb9dr\") on node \"crc\" DevicePath \"\"" Jan 26 11:00:10 crc kubenswrapper[5003]: I0126 11:00:10.712003 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/barbican-db-sync-dhfpd" Jan 26 11:00:10 crc kubenswrapper[5003]: I0126 11:00:10.713489 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-db-sync-dhfpd" event={"ID":"39e8f294-ba0f-4524-914c-501145d935b5","Type":"ContainerDied","Data":"6f93706a0dbf2c70a7d984120cd2dcdf59b5ae74e26c517122f67fc755b76820"} Jan 26 11:00:10 crc kubenswrapper[5003]: I0126 11:00:10.713535 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f93706a0dbf2c70a7d984120cd2dcdf59b5ae74e26c517122f67fc755b76820" Jan 26 11:00:10 crc kubenswrapper[5003]: I0126 11:00:10.992967 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/barbican-worker-85d88cd875-jg89w"] Jan 26 11:00:10 crc kubenswrapper[5003]: E0126 11:00:10.993247 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="980e8360-14d1-410c-b84d-c37d38f0ab56" containerName="collect-profiles" Jan 26 11:00:10 crc kubenswrapper[5003]: I0126 11:00:10.993261 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="980e8360-14d1-410c-b84d-c37d38f0ab56" containerName="collect-profiles" Jan 26 11:00:10 crc kubenswrapper[5003]: E0126 11:00:10.993271 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39e8f294-ba0f-4524-914c-501145d935b5" containerName="barbican-db-sync" Jan 26 11:00:10 crc kubenswrapper[5003]: I0126 11:00:10.993278 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="39e8f294-ba0f-4524-914c-501145d935b5" containerName="barbican-db-sync" Jan 26 11:00:10 crc kubenswrapper[5003]: I0126 11:00:10.993417 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="39e8f294-ba0f-4524-914c-501145d935b5" containerName="barbican-db-sync" Jan 26 11:00:10 crc kubenswrapper[5003]: I0126 11:00:10.993432 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="980e8360-14d1-410c-b84d-c37d38f0ab56" containerName="collect-profiles" Jan 26 11:00:10 crc kubenswrapper[5003]: I0126 11:00:10.994042 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/barbican-keystone-listener-66f485f88-d4z77"] Jan 26 11:00:10 crc kubenswrapper[5003]: I0126 11:00:10.994813 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-keystone-listener-66f485f88-d4z77" Jan 26 11:00:10 crc kubenswrapper[5003]: I0126 11:00:10.995640 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/barbican-worker-85d88cd875-jg89w" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.002855 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"barbican-worker-config-data" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.003049 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"barbican-keystone-listener-config-data" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.003201 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"barbican-config-data" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.003327 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"barbican-barbican-dockercfg-t8fjf" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.068554 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-worker-85d88cd875-jg89w"] Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.068865 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-keystone-listener-66f485f88-d4z77"] Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.120116 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0aac059e-645d-4967-838a-e51e27aad2ac-config-data-custom\") pod \"barbican-worker-85d88cd875-jg89w\" (UID: \"0aac059e-645d-4967-838a-e51e27aad2ac\") " pod="swift-kuttl-tests/barbican-worker-85d88cd875-jg89w" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.120159 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cmw85\" (UniqueName: \"kubernetes.io/projected/0aac059e-645d-4967-838a-e51e27aad2ac-kube-api-access-cmw85\") pod \"barbican-worker-85d88cd875-jg89w\" (UID: \"0aac059e-645d-4967-838a-e51e27aad2ac\") " pod="swift-kuttl-tests/barbican-worker-85d88cd875-jg89w" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.120226 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0aac059e-645d-4967-838a-e51e27aad2ac-config-data\") pod \"barbican-worker-85d88cd875-jg89w\" (UID: \"0aac059e-645d-4967-838a-e51e27aad2ac\") " pod="swift-kuttl-tests/barbican-worker-85d88cd875-jg89w" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.120285 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3ca7900c-1191-4d34-a44f-29fd6d510d90-config-data-custom\") pod \"barbican-keystone-listener-66f485f88-d4z77\" (UID: \"3ca7900c-1191-4d34-a44f-29fd6d510d90\") " pod="swift-kuttl-tests/barbican-keystone-listener-66f485f88-d4z77" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.120412 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ca7900c-1191-4d34-a44f-29fd6d510d90-config-data\") pod \"barbican-keystone-listener-66f485f88-d4z77\" (UID: \"3ca7900c-1191-4d34-a44f-29fd6d510d90\") " pod="swift-kuttl-tests/barbican-keystone-listener-66f485f88-d4z77" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.120436 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/3ca7900c-1191-4d34-a44f-29fd6d510d90-logs\") pod \"barbican-keystone-listener-66f485f88-d4z77\" (UID: \"3ca7900c-1191-4d34-a44f-29fd6d510d90\") " pod="swift-kuttl-tests/barbican-keystone-listener-66f485f88-d4z77" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.120554 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sq662\" (UniqueName: \"kubernetes.io/projected/3ca7900c-1191-4d34-a44f-29fd6d510d90-kube-api-access-sq662\") pod \"barbican-keystone-listener-66f485f88-d4z77\" (UID: \"3ca7900c-1191-4d34-a44f-29fd6d510d90\") " pod="swift-kuttl-tests/barbican-keystone-listener-66f485f88-d4z77" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.120724 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0aac059e-645d-4967-838a-e51e27aad2ac-logs\") pod \"barbican-worker-85d88cd875-jg89w\" (UID: \"0aac059e-645d-4967-838a-e51e27aad2ac\") " pod="swift-kuttl-tests/barbican-worker-85d88cd875-jg89w" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.176894 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/barbican-api-789587dbb8-kcmjx"] Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.178023 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-api-789587dbb8-kcmjx" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.180080 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"barbican-api-config-data" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.202130 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-api-789587dbb8-kcmjx"] Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.222344 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0aac059e-645d-4967-838a-e51e27aad2ac-config-data-custom\") pod \"barbican-worker-85d88cd875-jg89w\" (UID: \"0aac059e-645d-4967-838a-e51e27aad2ac\") " pod="swift-kuttl-tests/barbican-worker-85d88cd875-jg89w" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.222386 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cmw85\" (UniqueName: \"kubernetes.io/projected/0aac059e-645d-4967-838a-e51e27aad2ac-kube-api-access-cmw85\") pod \"barbican-worker-85d88cd875-jg89w\" (UID: \"0aac059e-645d-4967-838a-e51e27aad2ac\") " pod="swift-kuttl-tests/barbican-worker-85d88cd875-jg89w" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.222437 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0aac059e-645d-4967-838a-e51e27aad2ac-config-data\") pod \"barbican-worker-85d88cd875-jg89w\" (UID: \"0aac059e-645d-4967-838a-e51e27aad2ac\") " pod="swift-kuttl-tests/barbican-worker-85d88cd875-jg89w" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.222462 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3ca7900c-1191-4d34-a44f-29fd6d510d90-config-data-custom\") pod \"barbican-keystone-listener-66f485f88-d4z77\" (UID: \"3ca7900c-1191-4d34-a44f-29fd6d510d90\") " pod="swift-kuttl-tests/barbican-keystone-listener-66f485f88-d4z77" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.222487 5003 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ca7900c-1191-4d34-a44f-29fd6d510d90-config-data\") pod \"barbican-keystone-listener-66f485f88-d4z77\" (UID: \"3ca7900c-1191-4d34-a44f-29fd6d510d90\") " pod="swift-kuttl-tests/barbican-keystone-listener-66f485f88-d4z77" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.222505 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ca7900c-1191-4d34-a44f-29fd6d510d90-logs\") pod \"barbican-keystone-listener-66f485f88-d4z77\" (UID: \"3ca7900c-1191-4d34-a44f-29fd6d510d90\") " pod="swift-kuttl-tests/barbican-keystone-listener-66f485f88-d4z77" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.222533 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sq662\" (UniqueName: \"kubernetes.io/projected/3ca7900c-1191-4d34-a44f-29fd6d510d90-kube-api-access-sq662\") pod \"barbican-keystone-listener-66f485f88-d4z77\" (UID: \"3ca7900c-1191-4d34-a44f-29fd6d510d90\") " pod="swift-kuttl-tests/barbican-keystone-listener-66f485f88-d4z77" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.222570 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0aac059e-645d-4967-838a-e51e27aad2ac-logs\") pod \"barbican-worker-85d88cd875-jg89w\" (UID: \"0aac059e-645d-4967-838a-e51e27aad2ac\") " pod="swift-kuttl-tests/barbican-worker-85d88cd875-jg89w" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.222944 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0aac059e-645d-4967-838a-e51e27aad2ac-logs\") pod \"barbican-worker-85d88cd875-jg89w\" (UID: \"0aac059e-645d-4967-838a-e51e27aad2ac\") " pod="swift-kuttl-tests/barbican-worker-85d88cd875-jg89w" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.224064 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ca7900c-1191-4d34-a44f-29fd6d510d90-logs\") pod \"barbican-keystone-listener-66f485f88-d4z77\" (UID: \"3ca7900c-1191-4d34-a44f-29fd6d510d90\") " pod="swift-kuttl-tests/barbican-keystone-listener-66f485f88-d4z77" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.227454 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0aac059e-645d-4967-838a-e51e27aad2ac-config-data-custom\") pod \"barbican-worker-85d88cd875-jg89w\" (UID: \"0aac059e-645d-4967-838a-e51e27aad2ac\") " pod="swift-kuttl-tests/barbican-worker-85d88cd875-jg89w" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.227523 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ca7900c-1191-4d34-a44f-29fd6d510d90-config-data\") pod \"barbican-keystone-listener-66f485f88-d4z77\" (UID: \"3ca7900c-1191-4d34-a44f-29fd6d510d90\") " pod="swift-kuttl-tests/barbican-keystone-listener-66f485f88-d4z77" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.229445 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0aac059e-645d-4967-838a-e51e27aad2ac-config-data\") pod \"barbican-worker-85d88cd875-jg89w\" (UID: \"0aac059e-645d-4967-838a-e51e27aad2ac\") " pod="swift-kuttl-tests/barbican-worker-85d88cd875-jg89w" Jan 
26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.232905 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3ca7900c-1191-4d34-a44f-29fd6d510d90-config-data-custom\") pod \"barbican-keystone-listener-66f485f88-d4z77\" (UID: \"3ca7900c-1191-4d34-a44f-29fd6d510d90\") " pod="swift-kuttl-tests/barbican-keystone-listener-66f485f88-d4z77" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.243908 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cmw85\" (UniqueName: \"kubernetes.io/projected/0aac059e-645d-4967-838a-e51e27aad2ac-kube-api-access-cmw85\") pod \"barbican-worker-85d88cd875-jg89w\" (UID: \"0aac059e-645d-4967-838a-e51e27aad2ac\") " pod="swift-kuttl-tests/barbican-worker-85d88cd875-jg89w" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.244370 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sq662\" (UniqueName: \"kubernetes.io/projected/3ca7900c-1191-4d34-a44f-29fd6d510d90-kube-api-access-sq662\") pod \"barbican-keystone-listener-66f485f88-d4z77\" (UID: \"3ca7900c-1191-4d34-a44f-29fd6d510d90\") " pod="swift-kuttl-tests/barbican-keystone-listener-66f485f88-d4z77" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.324254 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8e9f91e0-3cda-46fd-9034-08b41bf5f546-config-data-custom\") pod \"barbican-api-789587dbb8-kcmjx\" (UID: \"8e9f91e0-3cda-46fd-9034-08b41bf5f546\") " pod="swift-kuttl-tests/barbican-api-789587dbb8-kcmjx" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.324329 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e9f91e0-3cda-46fd-9034-08b41bf5f546-config-data\") pod \"barbican-api-789587dbb8-kcmjx\" (UID: \"8e9f91e0-3cda-46fd-9034-08b41bf5f546\") " pod="swift-kuttl-tests/barbican-api-789587dbb8-kcmjx" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.324417 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p9qsr\" (UniqueName: \"kubernetes.io/projected/8e9f91e0-3cda-46fd-9034-08b41bf5f546-kube-api-access-p9qsr\") pod \"barbican-api-789587dbb8-kcmjx\" (UID: \"8e9f91e0-3cda-46fd-9034-08b41bf5f546\") " pod="swift-kuttl-tests/barbican-api-789587dbb8-kcmjx" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.324464 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8e9f91e0-3cda-46fd-9034-08b41bf5f546-logs\") pod \"barbican-api-789587dbb8-kcmjx\" (UID: \"8e9f91e0-3cda-46fd-9034-08b41bf5f546\") " pod="swift-kuttl-tests/barbican-api-789587dbb8-kcmjx" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.375641 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-keystone-listener-66f485f88-d4z77" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.385886 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/barbican-worker-85d88cd875-jg89w" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.426025 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8e9f91e0-3cda-46fd-9034-08b41bf5f546-config-data-custom\") pod \"barbican-api-789587dbb8-kcmjx\" (UID: \"8e9f91e0-3cda-46fd-9034-08b41bf5f546\") " pod="swift-kuttl-tests/barbican-api-789587dbb8-kcmjx" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.426069 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e9f91e0-3cda-46fd-9034-08b41bf5f546-config-data\") pod \"barbican-api-789587dbb8-kcmjx\" (UID: \"8e9f91e0-3cda-46fd-9034-08b41bf5f546\") " pod="swift-kuttl-tests/barbican-api-789587dbb8-kcmjx" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.426157 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p9qsr\" (UniqueName: \"kubernetes.io/projected/8e9f91e0-3cda-46fd-9034-08b41bf5f546-kube-api-access-p9qsr\") pod \"barbican-api-789587dbb8-kcmjx\" (UID: \"8e9f91e0-3cda-46fd-9034-08b41bf5f546\") " pod="swift-kuttl-tests/barbican-api-789587dbb8-kcmjx" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.426205 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8e9f91e0-3cda-46fd-9034-08b41bf5f546-logs\") pod \"barbican-api-789587dbb8-kcmjx\" (UID: \"8e9f91e0-3cda-46fd-9034-08b41bf5f546\") " pod="swift-kuttl-tests/barbican-api-789587dbb8-kcmjx" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.426991 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8e9f91e0-3cda-46fd-9034-08b41bf5f546-logs\") pod \"barbican-api-789587dbb8-kcmjx\" (UID: \"8e9f91e0-3cda-46fd-9034-08b41bf5f546\") " pod="swift-kuttl-tests/barbican-api-789587dbb8-kcmjx" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.432248 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8e9f91e0-3cda-46fd-9034-08b41bf5f546-config-data-custom\") pod \"barbican-api-789587dbb8-kcmjx\" (UID: \"8e9f91e0-3cda-46fd-9034-08b41bf5f546\") " pod="swift-kuttl-tests/barbican-api-789587dbb8-kcmjx" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.432645 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e9f91e0-3cda-46fd-9034-08b41bf5f546-config-data\") pod \"barbican-api-789587dbb8-kcmjx\" (UID: \"8e9f91e0-3cda-46fd-9034-08b41bf5f546\") " pod="swift-kuttl-tests/barbican-api-789587dbb8-kcmjx" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.445903 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p9qsr\" (UniqueName: \"kubernetes.io/projected/8e9f91e0-3cda-46fd-9034-08b41bf5f546-kube-api-access-p9qsr\") pod \"barbican-api-789587dbb8-kcmjx\" (UID: \"8e9f91e0-3cda-46fd-9034-08b41bf5f546\") " pod="swift-kuttl-tests/barbican-api-789587dbb8-kcmjx" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.505618 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/barbican-api-789587dbb8-kcmjx" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.766107 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/996dd2d122cc8f9fe59790ac7a034c9f3831d1134aff1130f721ee76b9t5ncz"] Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.767398 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/996dd2d122cc8f9fe59790ac7a034c9f3831d1134aff1130f721ee76b9t5ncz" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.771361 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-x8f6q" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.779631 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/996dd2d122cc8f9fe59790ac7a034c9f3831d1134aff1130f721ee76b9t5ncz"] Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.821696 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-worker-85d88cd875-jg89w"] Jan 26 11:00:11 crc kubenswrapper[5003]: W0126 11:00:11.931632 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3ca7900c_1191_4d34_a44f_29fd6d510d90.slice/crio-80d6d084fd480d033efbc70ed9544ea8111770ff6dfaf400bcf614cd1a9f8be4 WatchSource:0}: Error finding container 80d6d084fd480d033efbc70ed9544ea8111770ff6dfaf400bcf614cd1a9f8be4: Status 404 returned error can't find the container with id 80d6d084fd480d033efbc70ed9544ea8111770ff6dfaf400bcf614cd1a9f8be4 Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.932442 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-keystone-listener-66f485f88-d4z77"] Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.933424 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7bff\" (UniqueName: \"kubernetes.io/projected/77bf3970-44c7-4a45-993c-aeec1a194089-kube-api-access-r7bff\") pod \"996dd2d122cc8f9fe59790ac7a034c9f3831d1134aff1130f721ee76b9t5ncz\" (UID: \"77bf3970-44c7-4a45-993c-aeec1a194089\") " pod="openstack-operators/996dd2d122cc8f9fe59790ac7a034c9f3831d1134aff1130f721ee76b9t5ncz" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.933460 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/77bf3970-44c7-4a45-993c-aeec1a194089-util\") pod \"996dd2d122cc8f9fe59790ac7a034c9f3831d1134aff1130f721ee76b9t5ncz\" (UID: \"77bf3970-44c7-4a45-993c-aeec1a194089\") " pod="openstack-operators/996dd2d122cc8f9fe59790ac7a034c9f3831d1134aff1130f721ee76b9t5ncz" Jan 26 11:00:11 crc kubenswrapper[5003]: I0126 11:00:11.933561 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/77bf3970-44c7-4a45-993c-aeec1a194089-bundle\") pod \"996dd2d122cc8f9fe59790ac7a034c9f3831d1134aff1130f721ee76b9t5ncz\" (UID: \"77bf3970-44c7-4a45-993c-aeec1a194089\") " pod="openstack-operators/996dd2d122cc8f9fe59790ac7a034c9f3831d1134aff1130f721ee76b9t5ncz" Jan 26 11:00:12 crc kubenswrapper[5003]: I0126 11:00:12.013286 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-api-789587dbb8-kcmjx"] Jan 26 11:00:12 crc kubenswrapper[5003]: W0126 11:00:12.015714 5003 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8e9f91e0_3cda_46fd_9034_08b41bf5f546.slice/crio-274b8389b6b0dc53a5de34ce8ad0938b2de4028c96ab65e8f080a80e6cafdb2a WatchSource:0}: Error finding container 274b8389b6b0dc53a5de34ce8ad0938b2de4028c96ab65e8f080a80e6cafdb2a: Status 404 returned error can't find the container with id 274b8389b6b0dc53a5de34ce8ad0938b2de4028c96ab65e8f080a80e6cafdb2a Jan 26 11:00:12 crc kubenswrapper[5003]: I0126 11:00:12.034403 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/77bf3970-44c7-4a45-993c-aeec1a194089-bundle\") pod \"996dd2d122cc8f9fe59790ac7a034c9f3831d1134aff1130f721ee76b9t5ncz\" (UID: \"77bf3970-44c7-4a45-993c-aeec1a194089\") " pod="openstack-operators/996dd2d122cc8f9fe59790ac7a034c9f3831d1134aff1130f721ee76b9t5ncz" Jan 26 11:00:12 crc kubenswrapper[5003]: I0126 11:00:12.034538 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7bff\" (UniqueName: \"kubernetes.io/projected/77bf3970-44c7-4a45-993c-aeec1a194089-kube-api-access-r7bff\") pod \"996dd2d122cc8f9fe59790ac7a034c9f3831d1134aff1130f721ee76b9t5ncz\" (UID: \"77bf3970-44c7-4a45-993c-aeec1a194089\") " pod="openstack-operators/996dd2d122cc8f9fe59790ac7a034c9f3831d1134aff1130f721ee76b9t5ncz" Jan 26 11:00:12 crc kubenswrapper[5003]: I0126 11:00:12.034579 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/77bf3970-44c7-4a45-993c-aeec1a194089-util\") pod \"996dd2d122cc8f9fe59790ac7a034c9f3831d1134aff1130f721ee76b9t5ncz\" (UID: \"77bf3970-44c7-4a45-993c-aeec1a194089\") " pod="openstack-operators/996dd2d122cc8f9fe59790ac7a034c9f3831d1134aff1130f721ee76b9t5ncz" Jan 26 11:00:12 crc kubenswrapper[5003]: I0126 11:00:12.034984 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/77bf3970-44c7-4a45-993c-aeec1a194089-bundle\") pod \"996dd2d122cc8f9fe59790ac7a034c9f3831d1134aff1130f721ee76b9t5ncz\" (UID: \"77bf3970-44c7-4a45-993c-aeec1a194089\") " pod="openstack-operators/996dd2d122cc8f9fe59790ac7a034c9f3831d1134aff1130f721ee76b9t5ncz" Jan 26 11:00:12 crc kubenswrapper[5003]: I0126 11:00:12.035011 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/77bf3970-44c7-4a45-993c-aeec1a194089-util\") pod \"996dd2d122cc8f9fe59790ac7a034c9f3831d1134aff1130f721ee76b9t5ncz\" (UID: \"77bf3970-44c7-4a45-993c-aeec1a194089\") " pod="openstack-operators/996dd2d122cc8f9fe59790ac7a034c9f3831d1134aff1130f721ee76b9t5ncz" Jan 26 11:00:12 crc kubenswrapper[5003]: I0126 11:00:12.052349 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7bff\" (UniqueName: \"kubernetes.io/projected/77bf3970-44c7-4a45-993c-aeec1a194089-kube-api-access-r7bff\") pod \"996dd2d122cc8f9fe59790ac7a034c9f3831d1134aff1130f721ee76b9t5ncz\" (UID: \"77bf3970-44c7-4a45-993c-aeec1a194089\") " pod="openstack-operators/996dd2d122cc8f9fe59790ac7a034c9f3831d1134aff1130f721ee76b9t5ncz" Jan 26 11:00:12 crc kubenswrapper[5003]: I0126 11:00:12.086487 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/996dd2d122cc8f9fe59790ac7a034c9f3831d1134aff1130f721ee76b9t5ncz" Jan 26 11:00:12 crc kubenswrapper[5003]: I0126 11:00:12.331232 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/996dd2d122cc8f9fe59790ac7a034c9f3831d1134aff1130f721ee76b9t5ncz"] Jan 26 11:00:12 crc kubenswrapper[5003]: W0126 11:00:12.336801 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod77bf3970_44c7_4a45_993c_aeec1a194089.slice/crio-c6a8575f076f0adc1a5ff94b884c750874e869dcf7a3c971099ae1ec4721ff6c WatchSource:0}: Error finding container c6a8575f076f0adc1a5ff94b884c750874e869dcf7a3c971099ae1ec4721ff6c: Status 404 returned error can't find the container with id c6a8575f076f0adc1a5ff94b884c750874e869dcf7a3c971099ae1ec4721ff6c Jan 26 11:00:12 crc kubenswrapper[5003]: I0126 11:00:12.727817 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-keystone-listener-66f485f88-d4z77" event={"ID":"3ca7900c-1191-4d34-a44f-29fd6d510d90","Type":"ContainerStarted","Data":"80d6d084fd480d033efbc70ed9544ea8111770ff6dfaf400bcf614cd1a9f8be4"} Jan 26 11:00:12 crc kubenswrapper[5003]: I0126 11:00:12.729132 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-worker-85d88cd875-jg89w" event={"ID":"0aac059e-645d-4967-838a-e51e27aad2ac","Type":"ContainerStarted","Data":"14f3d31510cae86d510b49ef90686c64210c71b197759f7e9f25b234c0b1314f"} Jan 26 11:00:12 crc kubenswrapper[5003]: I0126 11:00:12.731152 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-api-789587dbb8-kcmjx" event={"ID":"8e9f91e0-3cda-46fd-9034-08b41bf5f546","Type":"ContainerStarted","Data":"c02da643fd61ea25a72c9b0890782bc77a961e0b80a41bb2a96e4b5dedf763dd"} Jan 26 11:00:12 crc kubenswrapper[5003]: I0126 11:00:12.731207 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-api-789587dbb8-kcmjx" event={"ID":"8e9f91e0-3cda-46fd-9034-08b41bf5f546","Type":"ContainerStarted","Data":"1e7a531cd0db4df67ad0a64eb22cf87ad43ecae131e25f253629eb31adfd2d28"} Jan 26 11:00:12 crc kubenswrapper[5003]: I0126 11:00:12.731219 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-api-789587dbb8-kcmjx" event={"ID":"8e9f91e0-3cda-46fd-9034-08b41bf5f546","Type":"ContainerStarted","Data":"274b8389b6b0dc53a5de34ce8ad0938b2de4028c96ab65e8f080a80e6cafdb2a"} Jan 26 11:00:12 crc kubenswrapper[5003]: I0126 11:00:12.731622 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/barbican-api-789587dbb8-kcmjx" Jan 26 11:00:12 crc kubenswrapper[5003]: I0126 11:00:12.732573 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/996dd2d122cc8f9fe59790ac7a034c9f3831d1134aff1130f721ee76b9t5ncz" event={"ID":"77bf3970-44c7-4a45-993c-aeec1a194089","Type":"ContainerStarted","Data":"c6a8575f076f0adc1a5ff94b884c750874e869dcf7a3c971099ae1ec4721ff6c"} Jan 26 11:00:12 crc kubenswrapper[5003]: I0126 11:00:12.750197 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/barbican-api-789587dbb8-kcmjx" podStartSLOduration=1.75017226 podStartE2EDuration="1.75017226s" podCreationTimestamp="2026-01-26 11:00:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 11:00:12.748636566 +0000 UTC m=+1028.289862127" 
watchObservedRunningTime="2026-01-26 11:00:12.75017226 +0000 UTC m=+1028.291397821" Jan 26 11:00:13 crc kubenswrapper[5003]: I0126 11:00:13.740303 5003 generic.go:334] "Generic (PLEG): container finished" podID="77bf3970-44c7-4a45-993c-aeec1a194089" containerID="ef4fc9a7abcd51e2ee632b9e5ac8d76aa4c55c2b913742a834636652e4441444" exitCode=0 Jan 26 11:00:13 crc kubenswrapper[5003]: I0126 11:00:13.740643 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/996dd2d122cc8f9fe59790ac7a034c9f3831d1134aff1130f721ee76b9t5ncz" event={"ID":"77bf3970-44c7-4a45-993c-aeec1a194089","Type":"ContainerDied","Data":"ef4fc9a7abcd51e2ee632b9e5ac8d76aa4c55c2b913742a834636652e4441444"} Jan 26 11:00:13 crc kubenswrapper[5003]: I0126 11:00:13.741042 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/barbican-api-789587dbb8-kcmjx" Jan 26 11:00:15 crc kubenswrapper[5003]: I0126 11:00:15.756524 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-keystone-listener-66f485f88-d4z77" event={"ID":"3ca7900c-1191-4d34-a44f-29fd6d510d90","Type":"ContainerStarted","Data":"cd88fd7d9ca42a0ce1ea33bc0c29660606bc8fe81c56254a1e46b0661c3056a6"} Jan 26 11:00:15 crc kubenswrapper[5003]: I0126 11:00:15.756883 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-keystone-listener-66f485f88-d4z77" event={"ID":"3ca7900c-1191-4d34-a44f-29fd6d510d90","Type":"ContainerStarted","Data":"190f5c4c6ff888fedb8a854b0496e160e2711922c5c7616b23b9a77f8394fd8c"} Jan 26 11:00:15 crc kubenswrapper[5003]: I0126 11:00:15.759036 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-worker-85d88cd875-jg89w" event={"ID":"0aac059e-645d-4967-838a-e51e27aad2ac","Type":"ContainerStarted","Data":"b033a76c17ed034389897b4015037094a75a7297f853d9f4e782227a6f0a5e17"} Jan 26 11:00:15 crc kubenswrapper[5003]: I0126 11:00:15.759078 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-worker-85d88cd875-jg89w" event={"ID":"0aac059e-645d-4967-838a-e51e27aad2ac","Type":"ContainerStarted","Data":"e7f0a55bf957c5db86d64021a11b76095732a05c01f1b8235c887e376fe03a17"} Jan 26 11:00:15 crc kubenswrapper[5003]: I0126 11:00:15.761092 5003 generic.go:334] "Generic (PLEG): container finished" podID="77bf3970-44c7-4a45-993c-aeec1a194089" containerID="f12709316bf926a0e401de08ced7b85510db278f932efac7e5ba75658a9a6657" exitCode=0 Jan 26 11:00:15 crc kubenswrapper[5003]: I0126 11:00:15.761125 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/996dd2d122cc8f9fe59790ac7a034c9f3831d1134aff1130f721ee76b9t5ncz" event={"ID":"77bf3970-44c7-4a45-993c-aeec1a194089","Type":"ContainerDied","Data":"f12709316bf926a0e401de08ced7b85510db278f932efac7e5ba75658a9a6657"} Jan 26 11:00:15 crc kubenswrapper[5003]: I0126 11:00:15.777337 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/barbican-keystone-listener-66f485f88-d4z77" podStartSLOduration=3.059134923 podStartE2EDuration="5.777317451s" podCreationTimestamp="2026-01-26 11:00:10 +0000 UTC" firstStartedPulling="2026-01-26 11:00:11.935134102 +0000 UTC m=+1027.476359663" lastFinishedPulling="2026-01-26 11:00:14.65331663 +0000 UTC m=+1030.194542191" observedRunningTime="2026-01-26 11:00:15.770793563 +0000 UTC m=+1031.312019134" watchObservedRunningTime="2026-01-26 11:00:15.777317451 +0000 UTC m=+1031.318543022" Jan 26 11:00:15 crc kubenswrapper[5003]: I0126 11:00:15.817429 5003 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/barbican-worker-85d88cd875-jg89w" podStartSLOduration=2.938615216 podStartE2EDuration="5.817404591s" podCreationTimestamp="2026-01-26 11:00:10 +0000 UTC" firstStartedPulling="2026-01-26 11:00:11.828111602 +0000 UTC m=+1027.369337163" lastFinishedPulling="2026-01-26 11:00:14.706900977 +0000 UTC m=+1030.248126538" observedRunningTime="2026-01-26 11:00:15.80761039 +0000 UTC m=+1031.348835951" watchObservedRunningTime="2026-01-26 11:00:15.817404591 +0000 UTC m=+1031.358630172" Jan 26 11:00:16 crc kubenswrapper[5003]: I0126 11:00:16.770805 5003 generic.go:334] "Generic (PLEG): container finished" podID="77bf3970-44c7-4a45-993c-aeec1a194089" containerID="4980f3504eb05cb29e3cdd365f7d46343966d9fd29860a7c25d731856895b254" exitCode=0 Jan 26 11:00:16 crc kubenswrapper[5003]: I0126 11:00:16.770993 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/996dd2d122cc8f9fe59790ac7a034c9f3831d1134aff1130f721ee76b9t5ncz" event={"ID":"77bf3970-44c7-4a45-993c-aeec1a194089","Type":"ContainerDied","Data":"4980f3504eb05cb29e3cdd365f7d46343966d9fd29860a7c25d731856895b254"} Jan 26 11:00:18 crc kubenswrapper[5003]: I0126 11:00:18.166797 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/996dd2d122cc8f9fe59790ac7a034c9f3831d1134aff1130f721ee76b9t5ncz" Jan 26 11:00:18 crc kubenswrapper[5003]: I0126 11:00:18.351385 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/77bf3970-44c7-4a45-993c-aeec1a194089-bundle\") pod \"77bf3970-44c7-4a45-993c-aeec1a194089\" (UID: \"77bf3970-44c7-4a45-993c-aeec1a194089\") " Jan 26 11:00:18 crc kubenswrapper[5003]: I0126 11:00:18.351462 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/77bf3970-44c7-4a45-993c-aeec1a194089-util\") pod \"77bf3970-44c7-4a45-993c-aeec1a194089\" (UID: \"77bf3970-44c7-4a45-993c-aeec1a194089\") " Jan 26 11:00:18 crc kubenswrapper[5003]: I0126 11:00:18.351491 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r7bff\" (UniqueName: \"kubernetes.io/projected/77bf3970-44c7-4a45-993c-aeec1a194089-kube-api-access-r7bff\") pod \"77bf3970-44c7-4a45-993c-aeec1a194089\" (UID: \"77bf3970-44c7-4a45-993c-aeec1a194089\") " Jan 26 11:00:18 crc kubenswrapper[5003]: I0126 11:00:18.352553 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/77bf3970-44c7-4a45-993c-aeec1a194089-bundle" (OuterVolumeSpecName: "bundle") pod "77bf3970-44c7-4a45-993c-aeec1a194089" (UID: "77bf3970-44c7-4a45-993c-aeec1a194089"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:00:18 crc kubenswrapper[5003]: I0126 11:00:18.371052 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77bf3970-44c7-4a45-993c-aeec1a194089-kube-api-access-r7bff" (OuterVolumeSpecName: "kube-api-access-r7bff") pod "77bf3970-44c7-4a45-993c-aeec1a194089" (UID: "77bf3970-44c7-4a45-993c-aeec1a194089"). InnerVolumeSpecName "kube-api-access-r7bff". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:00:18 crc kubenswrapper[5003]: I0126 11:00:18.377877 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/77bf3970-44c7-4a45-993c-aeec1a194089-util" (OuterVolumeSpecName: "util") pod "77bf3970-44c7-4a45-993c-aeec1a194089" (UID: "77bf3970-44c7-4a45-993c-aeec1a194089"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:00:18 crc kubenswrapper[5003]: I0126 11:00:18.453303 5003 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/77bf3970-44c7-4a45-993c-aeec1a194089-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 11:00:18 crc kubenswrapper[5003]: I0126 11:00:18.453334 5003 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/77bf3970-44c7-4a45-993c-aeec1a194089-util\") on node \"crc\" DevicePath \"\"" Jan 26 11:00:18 crc kubenswrapper[5003]: I0126 11:00:18.453346 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r7bff\" (UniqueName: \"kubernetes.io/projected/77bf3970-44c7-4a45-993c-aeec1a194089-kube-api-access-r7bff\") on node \"crc\" DevicePath \"\"" Jan 26 11:00:18 crc kubenswrapper[5003]: I0126 11:00:18.794000 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/996dd2d122cc8f9fe59790ac7a034c9f3831d1134aff1130f721ee76b9t5ncz" event={"ID":"77bf3970-44c7-4a45-993c-aeec1a194089","Type":"ContainerDied","Data":"c6a8575f076f0adc1a5ff94b884c750874e869dcf7a3c971099ae1ec4721ff6c"} Jan 26 11:00:18 crc kubenswrapper[5003]: I0126 11:00:18.794046 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c6a8575f076f0adc1a5ff94b884c750874e869dcf7a3c971099ae1ec4721ff6c" Jan 26 11:00:18 crc kubenswrapper[5003]: I0126 11:00:18.794099 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/996dd2d122cc8f9fe59790ac7a034c9f3831d1134aff1130f721ee76b9t5ncz" Jan 26 11:00:22 crc kubenswrapper[5003]: I0126 11:00:22.960631 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/barbican-api-789587dbb8-kcmjx" Jan 26 11:00:22 crc kubenswrapper[5003]: I0126 11:00:22.997900 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/barbican-api-789587dbb8-kcmjx" Jan 26 11:00:32 crc kubenswrapper[5003]: I0126 11:00:32.338398 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5df966c899-zcbvr"] Jan 26 11:00:32 crc kubenswrapper[5003]: E0126 11:00:32.339267 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77bf3970-44c7-4a45-993c-aeec1a194089" containerName="extract" Jan 26 11:00:32 crc kubenswrapper[5003]: I0126 11:00:32.339298 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="77bf3970-44c7-4a45-993c-aeec1a194089" containerName="extract" Jan 26 11:00:32 crc kubenswrapper[5003]: E0126 11:00:32.339317 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77bf3970-44c7-4a45-993c-aeec1a194089" containerName="util" Jan 26 11:00:32 crc kubenswrapper[5003]: I0126 11:00:32.339325 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="77bf3970-44c7-4a45-993c-aeec1a194089" containerName="util" Jan 26 11:00:32 crc kubenswrapper[5003]: E0126 11:00:32.339338 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77bf3970-44c7-4a45-993c-aeec1a194089" containerName="pull" Jan 26 11:00:32 crc kubenswrapper[5003]: I0126 11:00:32.339346 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="77bf3970-44c7-4a45-993c-aeec1a194089" containerName="pull" Jan 26 11:00:32 crc kubenswrapper[5003]: I0126 11:00:32.339486 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="77bf3970-44c7-4a45-993c-aeec1a194089" containerName="extract" Jan 26 11:00:32 crc kubenswrapper[5003]: I0126 11:00:32.340081 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5df966c899-zcbvr" Jan 26 11:00:32 crc kubenswrapper[5003]: I0126 11:00:32.342944 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-service-cert" Jan 26 11:00:32 crc kubenswrapper[5003]: I0126 11:00:32.343364 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-c8mrt" Jan 26 11:00:32 crc kubenswrapper[5003]: I0126 11:00:32.363341 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5df966c899-zcbvr"] Jan 26 11:00:32 crc kubenswrapper[5003]: I0126 11:00:32.374108 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/dcce3480-cea7-4075-80c8-60c85f8acdab-webhook-cert\") pod \"swift-operator-controller-manager-5df966c899-zcbvr\" (UID: \"dcce3480-cea7-4075-80c8-60c85f8acdab\") " pod="openstack-operators/swift-operator-controller-manager-5df966c899-zcbvr" Jan 26 11:00:32 crc kubenswrapper[5003]: I0126 11:00:32.374230 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqk7g\" (UniqueName: \"kubernetes.io/projected/dcce3480-cea7-4075-80c8-60c85f8acdab-kube-api-access-fqk7g\") pod \"swift-operator-controller-manager-5df966c899-zcbvr\" (UID: \"dcce3480-cea7-4075-80c8-60c85f8acdab\") " pod="openstack-operators/swift-operator-controller-manager-5df966c899-zcbvr" Jan 26 11:00:32 crc kubenswrapper[5003]: I0126 11:00:32.374255 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/dcce3480-cea7-4075-80c8-60c85f8acdab-apiservice-cert\") pod \"swift-operator-controller-manager-5df966c899-zcbvr\" (UID: \"dcce3480-cea7-4075-80c8-60c85f8acdab\") " pod="openstack-operators/swift-operator-controller-manager-5df966c899-zcbvr" Jan 26 11:00:32 crc kubenswrapper[5003]: I0126 11:00:32.475361 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/dcce3480-cea7-4075-80c8-60c85f8acdab-webhook-cert\") pod \"swift-operator-controller-manager-5df966c899-zcbvr\" (UID: \"dcce3480-cea7-4075-80c8-60c85f8acdab\") " pod="openstack-operators/swift-operator-controller-manager-5df966c899-zcbvr" Jan 26 11:00:32 crc kubenswrapper[5003]: I0126 11:00:32.475447 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqk7g\" (UniqueName: \"kubernetes.io/projected/dcce3480-cea7-4075-80c8-60c85f8acdab-kube-api-access-fqk7g\") pod \"swift-operator-controller-manager-5df966c899-zcbvr\" (UID: \"dcce3480-cea7-4075-80c8-60c85f8acdab\") " pod="openstack-operators/swift-operator-controller-manager-5df966c899-zcbvr" Jan 26 11:00:32 crc kubenswrapper[5003]: I0126 11:00:32.475473 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/dcce3480-cea7-4075-80c8-60c85f8acdab-apiservice-cert\") pod \"swift-operator-controller-manager-5df966c899-zcbvr\" (UID: \"dcce3480-cea7-4075-80c8-60c85f8acdab\") " pod="openstack-operators/swift-operator-controller-manager-5df966c899-zcbvr" Jan 26 11:00:32 crc kubenswrapper[5003]: I0126 11:00:32.485317 5003 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/dcce3480-cea7-4075-80c8-60c85f8acdab-webhook-cert\") pod \"swift-operator-controller-manager-5df966c899-zcbvr\" (UID: \"dcce3480-cea7-4075-80c8-60c85f8acdab\") " pod="openstack-operators/swift-operator-controller-manager-5df966c899-zcbvr" Jan 26 11:00:32 crc kubenswrapper[5003]: I0126 11:00:32.485958 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/dcce3480-cea7-4075-80c8-60c85f8acdab-apiservice-cert\") pod \"swift-operator-controller-manager-5df966c899-zcbvr\" (UID: \"dcce3480-cea7-4075-80c8-60c85f8acdab\") " pod="openstack-operators/swift-operator-controller-manager-5df966c899-zcbvr" Jan 26 11:00:32 crc kubenswrapper[5003]: I0126 11:00:32.492168 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqk7g\" (UniqueName: \"kubernetes.io/projected/dcce3480-cea7-4075-80c8-60c85f8acdab-kube-api-access-fqk7g\") pod \"swift-operator-controller-manager-5df966c899-zcbvr\" (UID: \"dcce3480-cea7-4075-80c8-60c85f8acdab\") " pod="openstack-operators/swift-operator-controller-manager-5df966c899-zcbvr" Jan 26 11:00:32 crc kubenswrapper[5003]: I0126 11:00:32.659619 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5df966c899-zcbvr" Jan 26 11:00:33 crc kubenswrapper[5003]: I0126 11:00:33.077258 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5df966c899-zcbvr"] Jan 26 11:00:33 crc kubenswrapper[5003]: I0126 11:00:33.928494 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5df966c899-zcbvr" event={"ID":"dcce3480-cea7-4075-80c8-60c85f8acdab","Type":"ContainerStarted","Data":"01ccd6c8b992c588a28c0a05ed2d726f47506ccdc507e12969509e44a963d1a9"} Jan 26 11:00:46 crc kubenswrapper[5003]: I0126 11:00:46.033364 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5df966c899-zcbvr" event={"ID":"dcce3480-cea7-4075-80c8-60c85f8acdab","Type":"ContainerStarted","Data":"da3c80abe397c27549854d607536ba1e260b8447541d20eb6aa010bc00b7bf2e"} Jan 26 11:00:46 crc kubenswrapper[5003]: I0126 11:00:46.033943 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5df966c899-zcbvr" Jan 26 11:00:46 crc kubenswrapper[5003]: I0126 11:00:46.059634 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5df966c899-zcbvr" podStartSLOduration=1.516759626 podStartE2EDuration="14.059612704s" podCreationTimestamp="2026-01-26 11:00:32 +0000 UTC" firstStartedPulling="2026-01-26 11:00:33.087609318 +0000 UTC m=+1048.628834879" lastFinishedPulling="2026-01-26 11:00:45.630462396 +0000 UTC m=+1061.171687957" observedRunningTime="2026-01-26 11:00:46.050082371 +0000 UTC m=+1061.591307952" watchObservedRunningTime="2026-01-26 11:00:46.059612704 +0000 UTC m=+1061.600838265" Jan 26 11:00:52 crc kubenswrapper[5003]: I0126 11:00:52.666792 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5df966c899-zcbvr" Jan 26 11:00:57 crc kubenswrapper[5003]: I0126 11:00:57.421182 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Jan 26 11:00:57 crc 
kubenswrapper[5003]: I0126 11:00:57.427821 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:00:57 crc kubenswrapper[5003]: I0126 11:00:57.430909 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-files" Jan 26 11:00:57 crc kubenswrapper[5003]: I0126 11:00:57.431358 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-storage-config-data" Jan 26 11:00:57 crc kubenswrapper[5003]: I0126 11:00:57.431762 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-conf" Jan 26 11:00:57 crc kubenswrapper[5003]: I0126 11:00:57.432240 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-swift-dockercfg-n9968" Jan 26 11:00:57 crc kubenswrapper[5003]: I0126 11:00:57.446312 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Jan 26 11:00:57 crc kubenswrapper[5003]: I0126 11:00:57.550799 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b1cd12b2-b1e1-4085-b627-52c229799294-etc-swift\") pod \"swift-storage-0\" (UID: \"b1cd12b2-b1e1-4085-b627-52c229799294\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:00:57 crc kubenswrapper[5003]: I0126 11:00:57.550880 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/b1cd12b2-b1e1-4085-b627-52c229799294-cache\") pod \"swift-storage-0\" (UID: \"b1cd12b2-b1e1-4085-b627-52c229799294\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:00:57 crc kubenswrapper[5003]: I0126 11:00:57.550931 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bpdtb\" (UniqueName: \"kubernetes.io/projected/b1cd12b2-b1e1-4085-b627-52c229799294-kube-api-access-bpdtb\") pod \"swift-storage-0\" (UID: \"b1cd12b2-b1e1-4085-b627-52c229799294\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:00:57 crc kubenswrapper[5003]: I0126 11:00:57.551018 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"b1cd12b2-b1e1-4085-b627-52c229799294\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:00:57 crc kubenswrapper[5003]: I0126 11:00:57.551060 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/b1cd12b2-b1e1-4085-b627-52c229799294-lock\") pod \"swift-storage-0\" (UID: \"b1cd12b2-b1e1-4085-b627-52c229799294\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:00:57 crc kubenswrapper[5003]: I0126 11:00:57.652448 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"b1cd12b2-b1e1-4085-b627-52c229799294\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:00:57 crc kubenswrapper[5003]: I0126 11:00:57.652545 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/b1cd12b2-b1e1-4085-b627-52c229799294-lock\") pod \"swift-storage-0\" (UID: 
\"b1cd12b2-b1e1-4085-b627-52c229799294\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:00:57 crc kubenswrapper[5003]: I0126 11:00:57.652637 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b1cd12b2-b1e1-4085-b627-52c229799294-etc-swift\") pod \"swift-storage-0\" (UID: \"b1cd12b2-b1e1-4085-b627-52c229799294\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:00:57 crc kubenswrapper[5003]: I0126 11:00:57.652693 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/b1cd12b2-b1e1-4085-b627-52c229799294-cache\") pod \"swift-storage-0\" (UID: \"b1cd12b2-b1e1-4085-b627-52c229799294\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:00:57 crc kubenswrapper[5003]: I0126 11:00:57.652766 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bpdtb\" (UniqueName: \"kubernetes.io/projected/b1cd12b2-b1e1-4085-b627-52c229799294-kube-api-access-bpdtb\") pod \"swift-storage-0\" (UID: \"b1cd12b2-b1e1-4085-b627-52c229799294\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:00:57 crc kubenswrapper[5003]: E0126 11:00:57.653216 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:00:57 crc kubenswrapper[5003]: E0126 11:00:57.653266 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 26 11:00:57 crc kubenswrapper[5003]: E0126 11:00:57.653346 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b1cd12b2-b1e1-4085-b627-52c229799294-etc-swift podName:b1cd12b2-b1e1-4085-b627-52c229799294 nodeName:}" failed. No retries permitted until 2026-01-26 11:00:58.153327008 +0000 UTC m=+1073.694552569 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b1cd12b2-b1e1-4085-b627-52c229799294-etc-swift") pod "swift-storage-0" (UID: "b1cd12b2-b1e1-4085-b627-52c229799294") : configmap "swift-ring-files" not found Jan 26 11:00:57 crc kubenswrapper[5003]: I0126 11:00:57.653556 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"b1cd12b2-b1e1-4085-b627-52c229799294\") device mount path \"/mnt/openstack/pv03\"" pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:00:57 crc kubenswrapper[5003]: I0126 11:00:57.654437 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/b1cd12b2-b1e1-4085-b627-52c229799294-cache\") pod \"swift-storage-0\" (UID: \"b1cd12b2-b1e1-4085-b627-52c229799294\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:00:57 crc kubenswrapper[5003]: I0126 11:00:57.655187 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/b1cd12b2-b1e1-4085-b627-52c229799294-lock\") pod \"swift-storage-0\" (UID: \"b1cd12b2-b1e1-4085-b627-52c229799294\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:00:57 crc kubenswrapper[5003]: I0126 11:00:57.688144 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"b1cd12b2-b1e1-4085-b627-52c229799294\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:00:57 crc kubenswrapper[5003]: I0126 11:00:57.688334 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bpdtb\" (UniqueName: \"kubernetes.io/projected/b1cd12b2-b1e1-4085-b627-52c229799294-kube-api-access-bpdtb\") pod \"swift-storage-0\" (UID: \"b1cd12b2-b1e1-4085-b627-52c229799294\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:00:57 crc kubenswrapper[5003]: I0126 11:00:57.963606 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-qq2x8"] Jan 26 11:00:57 crc kubenswrapper[5003]: I0126 11:00:57.964744 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-qq2x8" Jan 26 11:00:57 crc kubenswrapper[5003]: I0126 11:00:57.966969 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-scripts" Jan 26 11:00:57 crc kubenswrapper[5003]: I0126 11:00:57.967214 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-config-data" Jan 26 11:00:57 crc kubenswrapper[5003]: I0126 11:00:57.968652 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-proxy-config-data" Jan 26 11:00:57 crc kubenswrapper[5003]: I0126 11:00:57.978958 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-qq2x8"] Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.058216 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-swiftconf\") pod \"swift-ring-rebalance-qq2x8\" (UID: \"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e\") " pod="swift-kuttl-tests/swift-ring-rebalance-qq2x8" Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.058272 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-scripts\") pod \"swift-ring-rebalance-qq2x8\" (UID: \"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e\") " pod="swift-kuttl-tests/swift-ring-rebalance-qq2x8" Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.058322 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxhs6\" (UniqueName: \"kubernetes.io/projected/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-kube-api-access-jxhs6\") pod \"swift-ring-rebalance-qq2x8\" (UID: \"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e\") " pod="swift-kuttl-tests/swift-ring-rebalance-qq2x8" Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.058390 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-ring-data-devices\") pod \"swift-ring-rebalance-qq2x8\" (UID: \"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e\") " pod="swift-kuttl-tests/swift-ring-rebalance-qq2x8" Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.058444 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-etc-swift\") pod \"swift-ring-rebalance-qq2x8\" (UID: \"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e\") " pod="swift-kuttl-tests/swift-ring-rebalance-qq2x8" Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.058485 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-dispersionconf\") pod \"swift-ring-rebalance-qq2x8\" (UID: \"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e\") " pod="swift-kuttl-tests/swift-ring-rebalance-qq2x8" Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.159583 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-etc-swift\") pod \"swift-ring-rebalance-qq2x8\" (UID: 
\"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e\") " pod="swift-kuttl-tests/swift-ring-rebalance-qq2x8" Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.159968 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-dispersionconf\") pod \"swift-ring-rebalance-qq2x8\" (UID: \"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e\") " pod="swift-kuttl-tests/swift-ring-rebalance-qq2x8" Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.160029 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-swiftconf\") pod \"swift-ring-rebalance-qq2x8\" (UID: \"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e\") " pod="swift-kuttl-tests/swift-ring-rebalance-qq2x8" Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.160061 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-scripts\") pod \"swift-ring-rebalance-qq2x8\" (UID: \"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e\") " pod="swift-kuttl-tests/swift-ring-rebalance-qq2x8" Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.160094 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxhs6\" (UniqueName: \"kubernetes.io/projected/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-kube-api-access-jxhs6\") pod \"swift-ring-rebalance-qq2x8\" (UID: \"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e\") " pod="swift-kuttl-tests/swift-ring-rebalance-qq2x8" Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.160123 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-ring-data-devices\") pod \"swift-ring-rebalance-qq2x8\" (UID: \"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e\") " pod="swift-kuttl-tests/swift-ring-rebalance-qq2x8" Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.160193 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b1cd12b2-b1e1-4085-b627-52c229799294-etc-swift\") pod \"swift-storage-0\" (UID: \"b1cd12b2-b1e1-4085-b627-52c229799294\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:00:58 crc kubenswrapper[5003]: E0126 11:00:58.160431 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:00:58 crc kubenswrapper[5003]: E0126 11:00:58.160465 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 26 11:00:58 crc kubenswrapper[5003]: E0126 11:00:58.160532 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b1cd12b2-b1e1-4085-b627-52c229799294-etc-swift podName:b1cd12b2-b1e1-4085-b627-52c229799294 nodeName:}" failed. No retries permitted until 2026-01-26 11:00:59.160509466 +0000 UTC m=+1074.701735017 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b1cd12b2-b1e1-4085-b627-52c229799294-etc-swift") pod "swift-storage-0" (UID: "b1cd12b2-b1e1-4085-b627-52c229799294") : configmap "swift-ring-files" not found Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.160810 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-etc-swift\") pod \"swift-ring-rebalance-qq2x8\" (UID: \"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e\") " pod="swift-kuttl-tests/swift-ring-rebalance-qq2x8" Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.161484 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-scripts\") pod \"swift-ring-rebalance-qq2x8\" (UID: \"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e\") " pod="swift-kuttl-tests/swift-ring-rebalance-qq2x8" Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.164779 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-ring-data-devices\") pod \"swift-ring-rebalance-qq2x8\" (UID: \"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e\") " pod="swift-kuttl-tests/swift-ring-rebalance-qq2x8" Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.167877 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-dispersionconf\") pod \"swift-ring-rebalance-qq2x8\" (UID: \"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e\") " pod="swift-kuttl-tests/swift-ring-rebalance-qq2x8" Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.177682 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-swiftconf\") pod \"swift-ring-rebalance-qq2x8\" (UID: \"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e\") " pod="swift-kuttl-tests/swift-ring-rebalance-qq2x8" Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.187988 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxhs6\" (UniqueName: \"kubernetes.io/projected/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-kube-api-access-jxhs6\") pod \"swift-ring-rebalance-qq2x8\" (UID: \"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e\") " pod="swift-kuttl-tests/swift-ring-rebalance-qq2x8" Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.284333 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-qq2x8" Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.547310 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf"] Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.548687 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.577047 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf"] Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.666750 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a9437ea-d818-4371-907f-ef50e49fab62-run-httpd\") pod \"swift-proxy-67f6cc5479-gvvmf\" (UID: \"2a9437ea-d818-4371-907f-ef50e49fab62\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.666810 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a9437ea-d818-4371-907f-ef50e49fab62-log-httpd\") pod \"swift-proxy-67f6cc5479-gvvmf\" (UID: \"2a9437ea-d818-4371-907f-ef50e49fab62\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.666902 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a9437ea-d818-4371-907f-ef50e49fab62-config-data\") pod \"swift-proxy-67f6cc5479-gvvmf\" (UID: \"2a9437ea-d818-4371-907f-ef50e49fab62\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.666926 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2a9437ea-d818-4371-907f-ef50e49fab62-etc-swift\") pod \"swift-proxy-67f6cc5479-gvvmf\" (UID: \"2a9437ea-d818-4371-907f-ef50e49fab62\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.667036 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rv4zk\" (UniqueName: \"kubernetes.io/projected/2a9437ea-d818-4371-907f-ef50e49fab62-kube-api-access-rv4zk\") pod \"swift-proxy-67f6cc5479-gvvmf\" (UID: \"2a9437ea-d818-4371-907f-ef50e49fab62\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.755448 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-qq2x8"] Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.770761 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a9437ea-d818-4371-907f-ef50e49fab62-config-data\") pod \"swift-proxy-67f6cc5479-gvvmf\" (UID: \"2a9437ea-d818-4371-907f-ef50e49fab62\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.770807 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2a9437ea-d818-4371-907f-ef50e49fab62-etc-swift\") pod \"swift-proxy-67f6cc5479-gvvmf\" (UID: \"2a9437ea-d818-4371-907f-ef50e49fab62\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.770855 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rv4zk\" (UniqueName: \"kubernetes.io/projected/2a9437ea-d818-4371-907f-ef50e49fab62-kube-api-access-rv4zk\") pod 
\"swift-proxy-67f6cc5479-gvvmf\" (UID: \"2a9437ea-d818-4371-907f-ef50e49fab62\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.770922 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a9437ea-d818-4371-907f-ef50e49fab62-run-httpd\") pod \"swift-proxy-67f6cc5479-gvvmf\" (UID: \"2a9437ea-d818-4371-907f-ef50e49fab62\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.770955 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a9437ea-d818-4371-907f-ef50e49fab62-log-httpd\") pod \"swift-proxy-67f6cc5479-gvvmf\" (UID: \"2a9437ea-d818-4371-907f-ef50e49fab62\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" Jan 26 11:00:58 crc kubenswrapper[5003]: E0126 11:00:58.771027 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:00:58 crc kubenswrapper[5003]: E0126 11:00:58.771067 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf: configmap "swift-ring-files" not found Jan 26 11:00:58 crc kubenswrapper[5003]: E0126 11:00:58.771136 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/2a9437ea-d818-4371-907f-ef50e49fab62-etc-swift podName:2a9437ea-d818-4371-907f-ef50e49fab62 nodeName:}" failed. No retries permitted until 2026-01-26 11:00:59.271112561 +0000 UTC m=+1074.812338192 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/2a9437ea-d818-4371-907f-ef50e49fab62-etc-swift") pod "swift-proxy-67f6cc5479-gvvmf" (UID: "2a9437ea-d818-4371-907f-ef50e49fab62") : configmap "swift-ring-files" not found Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.771732 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a9437ea-d818-4371-907f-ef50e49fab62-log-httpd\") pod \"swift-proxy-67f6cc5479-gvvmf\" (UID: \"2a9437ea-d818-4371-907f-ef50e49fab62\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.772246 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a9437ea-d818-4371-907f-ef50e49fab62-run-httpd\") pod \"swift-proxy-67f6cc5479-gvvmf\" (UID: \"2a9437ea-d818-4371-907f-ef50e49fab62\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.777446 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a9437ea-d818-4371-907f-ef50e49fab62-config-data\") pod \"swift-proxy-67f6cc5479-gvvmf\" (UID: \"2a9437ea-d818-4371-907f-ef50e49fab62\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" Jan 26 11:00:58 crc kubenswrapper[5003]: I0126 11:00:58.787331 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rv4zk\" (UniqueName: \"kubernetes.io/projected/2a9437ea-d818-4371-907f-ef50e49fab62-kube-api-access-rv4zk\") pod \"swift-proxy-67f6cc5479-gvvmf\" (UID: \"2a9437ea-d818-4371-907f-ef50e49fab62\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" Jan 26 11:00:59 crc kubenswrapper[5003]: 
I0126 11:00:59.130053 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-qq2x8" event={"ID":"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e","Type":"ContainerStarted","Data":"d248e6d78e61c401845cbc0e40db9ea8f54f809e42cb8bcef04800ff43729b09"} Jan 26 11:00:59 crc kubenswrapper[5003]: I0126 11:00:59.176621 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b1cd12b2-b1e1-4085-b627-52c229799294-etc-swift\") pod \"swift-storage-0\" (UID: \"b1cd12b2-b1e1-4085-b627-52c229799294\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:00:59 crc kubenswrapper[5003]: E0126 11:00:59.176903 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:00:59 crc kubenswrapper[5003]: E0126 11:00:59.177119 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 26 11:00:59 crc kubenswrapper[5003]: E0126 11:00:59.177185 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b1cd12b2-b1e1-4085-b627-52c229799294-etc-swift podName:b1cd12b2-b1e1-4085-b627-52c229799294 nodeName:}" failed. No retries permitted until 2026-01-26 11:01:01.177167308 +0000 UTC m=+1076.718392869 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b1cd12b2-b1e1-4085-b627-52c229799294-etc-swift") pod "swift-storage-0" (UID: "b1cd12b2-b1e1-4085-b627-52c229799294") : configmap "swift-ring-files" not found Jan 26 11:00:59 crc kubenswrapper[5003]: I0126 11:00:59.278180 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2a9437ea-d818-4371-907f-ef50e49fab62-etc-swift\") pod \"swift-proxy-67f6cc5479-gvvmf\" (UID: \"2a9437ea-d818-4371-907f-ef50e49fab62\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" Jan 26 11:00:59 crc kubenswrapper[5003]: E0126 11:00:59.278331 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:00:59 crc kubenswrapper[5003]: E0126 11:00:59.278344 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf: configmap "swift-ring-files" not found Jan 26 11:00:59 crc kubenswrapper[5003]: E0126 11:00:59.278409 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/2a9437ea-d818-4371-907f-ef50e49fab62-etc-swift podName:2a9437ea-d818-4371-907f-ef50e49fab62 nodeName:}" failed. No retries permitted until 2026-01-26 11:01:00.278392882 +0000 UTC m=+1075.819618443 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/2a9437ea-d818-4371-907f-ef50e49fab62-etc-swift") pod "swift-proxy-67f6cc5479-gvvmf" (UID: "2a9437ea-d818-4371-907f-ef50e49fab62") : configmap "swift-ring-files" not found Jan 26 11:01:00 crc kubenswrapper[5003]: I0126 11:01:00.129007 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/keystone-cron-29490421-k962l"] Jan 26 11:01:00 crc kubenswrapper[5003]: I0126 11:01:00.129998 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/keystone-cron-29490421-k962l" Jan 26 11:01:00 crc kubenswrapper[5003]: I0126 11:01:00.153568 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone-cron-29490421-k962l"] Jan 26 11:01:00 crc kubenswrapper[5003]: I0126 11:01:00.194212 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e90022bf-9ff0-432e-92ea-8059ae78aada-fernet-keys\") pod \"keystone-cron-29490421-k962l\" (UID: \"e90022bf-9ff0-432e-92ea-8059ae78aada\") " pod="swift-kuttl-tests/keystone-cron-29490421-k962l" Jan 26 11:01:00 crc kubenswrapper[5003]: I0126 11:01:00.194349 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hnnv\" (UniqueName: \"kubernetes.io/projected/e90022bf-9ff0-432e-92ea-8059ae78aada-kube-api-access-2hnnv\") pod \"keystone-cron-29490421-k962l\" (UID: \"e90022bf-9ff0-432e-92ea-8059ae78aada\") " pod="swift-kuttl-tests/keystone-cron-29490421-k962l" Jan 26 11:01:00 crc kubenswrapper[5003]: I0126 11:01:00.194409 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e90022bf-9ff0-432e-92ea-8059ae78aada-config-data\") pod \"keystone-cron-29490421-k962l\" (UID: \"e90022bf-9ff0-432e-92ea-8059ae78aada\") " pod="swift-kuttl-tests/keystone-cron-29490421-k962l" Jan 26 11:01:00 crc kubenswrapper[5003]: I0126 11:01:00.296322 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2a9437ea-d818-4371-907f-ef50e49fab62-etc-swift\") pod \"swift-proxy-67f6cc5479-gvvmf\" (UID: \"2a9437ea-d818-4371-907f-ef50e49fab62\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" Jan 26 11:01:00 crc kubenswrapper[5003]: I0126 11:01:00.296413 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e90022bf-9ff0-432e-92ea-8059ae78aada-fernet-keys\") pod \"keystone-cron-29490421-k962l\" (UID: \"e90022bf-9ff0-432e-92ea-8059ae78aada\") " pod="swift-kuttl-tests/keystone-cron-29490421-k962l" Jan 26 11:01:00 crc kubenswrapper[5003]: I0126 11:01:00.296473 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hnnv\" (UniqueName: \"kubernetes.io/projected/e90022bf-9ff0-432e-92ea-8059ae78aada-kube-api-access-2hnnv\") pod \"keystone-cron-29490421-k962l\" (UID: \"e90022bf-9ff0-432e-92ea-8059ae78aada\") " pod="swift-kuttl-tests/keystone-cron-29490421-k962l" Jan 26 11:01:00 crc kubenswrapper[5003]: I0126 11:01:00.296519 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e90022bf-9ff0-432e-92ea-8059ae78aada-config-data\") pod \"keystone-cron-29490421-k962l\" (UID: \"e90022bf-9ff0-432e-92ea-8059ae78aada\") " pod="swift-kuttl-tests/keystone-cron-29490421-k962l" Jan 26 11:01:00 crc kubenswrapper[5003]: E0126 11:01:00.297088 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:01:00 crc kubenswrapper[5003]: E0126 11:01:00.297135 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf: configmap "swift-ring-files" not found Jan 26 11:01:00 crc kubenswrapper[5003]: E0126 11:01:00.297620 5003 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/2a9437ea-d818-4371-907f-ef50e49fab62-etc-swift podName:2a9437ea-d818-4371-907f-ef50e49fab62 nodeName:}" failed. No retries permitted until 2026-01-26 11:01:02.297591666 +0000 UTC m=+1077.838817227 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/2a9437ea-d818-4371-907f-ef50e49fab62-etc-swift") pod "swift-proxy-67f6cc5479-gvvmf" (UID: "2a9437ea-d818-4371-907f-ef50e49fab62") : configmap "swift-ring-files" not found Jan 26 11:01:00 crc kubenswrapper[5003]: I0126 11:01:00.304180 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e90022bf-9ff0-432e-92ea-8059ae78aada-config-data\") pod \"keystone-cron-29490421-k962l\" (UID: \"e90022bf-9ff0-432e-92ea-8059ae78aada\") " pod="swift-kuttl-tests/keystone-cron-29490421-k962l" Jan 26 11:01:00 crc kubenswrapper[5003]: I0126 11:01:00.312579 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e90022bf-9ff0-432e-92ea-8059ae78aada-fernet-keys\") pod \"keystone-cron-29490421-k962l\" (UID: \"e90022bf-9ff0-432e-92ea-8059ae78aada\") " pod="swift-kuttl-tests/keystone-cron-29490421-k962l" Jan 26 11:01:00 crc kubenswrapper[5003]: I0126 11:01:00.319632 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hnnv\" (UniqueName: \"kubernetes.io/projected/e90022bf-9ff0-432e-92ea-8059ae78aada-kube-api-access-2hnnv\") pod \"keystone-cron-29490421-k962l\" (UID: \"e90022bf-9ff0-432e-92ea-8059ae78aada\") " pod="swift-kuttl-tests/keystone-cron-29490421-k962l" Jan 26 11:01:00 crc kubenswrapper[5003]: I0126 11:01:00.528471 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone-cron-29490421-k962l" Jan 26 11:01:01 crc kubenswrapper[5003]: I0126 11:01:01.222531 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b1cd12b2-b1e1-4085-b627-52c229799294-etc-swift\") pod \"swift-storage-0\" (UID: \"b1cd12b2-b1e1-4085-b627-52c229799294\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:01:01 crc kubenswrapper[5003]: E0126 11:01:01.222713 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:01:01 crc kubenswrapper[5003]: E0126 11:01:01.222728 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 26 11:01:01 crc kubenswrapper[5003]: E0126 11:01:01.222769 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b1cd12b2-b1e1-4085-b627-52c229799294-etc-swift podName:b1cd12b2-b1e1-4085-b627-52c229799294 nodeName:}" failed. No retries permitted until 2026-01-26 11:01:05.222754853 +0000 UTC m=+1080.763980414 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b1cd12b2-b1e1-4085-b627-52c229799294-etc-swift") pod "swift-storage-0" (UID: "b1cd12b2-b1e1-4085-b627-52c229799294") : configmap "swift-ring-files" not found Jan 26 11:01:02 crc kubenswrapper[5003]: I0126 11:01:02.340149 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2a9437ea-d818-4371-907f-ef50e49fab62-etc-swift\") pod \"swift-proxy-67f6cc5479-gvvmf\" (UID: \"2a9437ea-d818-4371-907f-ef50e49fab62\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" Jan 26 11:01:02 crc kubenswrapper[5003]: E0126 11:01:02.340350 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:01:02 crc kubenswrapper[5003]: E0126 11:01:02.340370 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf: configmap "swift-ring-files" not found Jan 26 11:01:02 crc kubenswrapper[5003]: E0126 11:01:02.340430 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/2a9437ea-d818-4371-907f-ef50e49fab62-etc-swift podName:2a9437ea-d818-4371-907f-ef50e49fab62 nodeName:}" failed. No retries permitted until 2026-01-26 11:01:06.340412243 +0000 UTC m=+1081.881637804 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/2a9437ea-d818-4371-907f-ef50e49fab62-etc-swift") pod "swift-proxy-67f6cc5479-gvvmf" (UID: "2a9437ea-d818-4371-907f-ef50e49fab62") : configmap "swift-ring-files" not found Jan 26 11:01:02 crc kubenswrapper[5003]: I0126 11:01:02.743597 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone-cron-29490421-k962l"] Jan 26 11:01:02 crc kubenswrapper[5003]: W0126 11:01:02.754905 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode90022bf_9ff0_432e_92ea_8059ae78aada.slice/crio-be72e1a1698e006390c260cdf2e229b7637f8af5edf04b7d08ef4d638ac25c9a WatchSource:0}: Error finding container be72e1a1698e006390c260cdf2e229b7637f8af5edf04b7d08ef4d638ac25c9a: Status 404 returned error can't find the container with id be72e1a1698e006390c260cdf2e229b7637f8af5edf04b7d08ef4d638ac25c9a Jan 26 11:01:03 crc kubenswrapper[5003]: I0126 11:01:03.177349 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-qq2x8" event={"ID":"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e","Type":"ContainerStarted","Data":"0c027d858c4da8f5f47b15b900f9a9f7e93e78053e7a49d474bac09254c88b7b"} Jan 26 11:01:03 crc kubenswrapper[5003]: I0126 11:01:03.180351 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-cron-29490421-k962l" event={"ID":"e90022bf-9ff0-432e-92ea-8059ae78aada","Type":"ContainerStarted","Data":"e2d12eed1bca9a1a4a3763846200ba27d0f740f24867c893379ced65f6063f75"} Jan 26 11:01:03 crc kubenswrapper[5003]: I0126 11:01:03.180390 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-cron-29490421-k962l" event={"ID":"e90022bf-9ff0-432e-92ea-8059ae78aada","Type":"ContainerStarted","Data":"be72e1a1698e006390c260cdf2e229b7637f8af5edf04b7d08ef4d638ac25c9a"} Jan 26 11:01:03 crc kubenswrapper[5003]: I0126 11:01:03.196019 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="swift-kuttl-tests/swift-ring-rebalance-qq2x8" podStartSLOduration=2.562863551 podStartE2EDuration="6.195998244s" podCreationTimestamp="2026-01-26 11:00:57 +0000 UTC" firstStartedPulling="2026-01-26 11:00:58.761301809 +0000 UTC m=+1074.302527370" lastFinishedPulling="2026-01-26 11:01:02.394436502 +0000 UTC m=+1077.935662063" observedRunningTime="2026-01-26 11:01:03.195099479 +0000 UTC m=+1078.736325060" watchObservedRunningTime="2026-01-26 11:01:03.195998244 +0000 UTC m=+1078.737223805" Jan 26 11:01:05 crc kubenswrapper[5003]: I0126 11:01:05.194880 5003 generic.go:334] "Generic (PLEG): container finished" podID="e90022bf-9ff0-432e-92ea-8059ae78aada" containerID="e2d12eed1bca9a1a4a3763846200ba27d0f740f24867c893379ced65f6063f75" exitCode=0 Jan 26 11:01:05 crc kubenswrapper[5003]: I0126 11:01:05.195186 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-cron-29490421-k962l" event={"ID":"e90022bf-9ff0-432e-92ea-8059ae78aada","Type":"ContainerDied","Data":"e2d12eed1bca9a1a4a3763846200ba27d0f740f24867c893379ced65f6063f75"} Jan 26 11:01:05 crc kubenswrapper[5003]: I0126 11:01:05.286623 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b1cd12b2-b1e1-4085-b627-52c229799294-etc-swift\") pod \"swift-storage-0\" (UID: \"b1cd12b2-b1e1-4085-b627-52c229799294\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:01:05 crc kubenswrapper[5003]: E0126 11:01:05.286834 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:01:05 crc kubenswrapper[5003]: E0126 11:01:05.286852 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 26 11:01:05 crc kubenswrapper[5003]: E0126 11:01:05.286893 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b1cd12b2-b1e1-4085-b627-52c229799294-etc-swift podName:b1cd12b2-b1e1-4085-b627-52c229799294 nodeName:}" failed. No retries permitted until 2026-01-26 11:01:13.286879279 +0000 UTC m=+1088.828104840 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b1cd12b2-b1e1-4085-b627-52c229799294-etc-swift") pod "swift-storage-0" (UID: "b1cd12b2-b1e1-4085-b627-52c229799294") : configmap "swift-ring-files" not found Jan 26 11:01:06 crc kubenswrapper[5003]: I0126 11:01:06.404843 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2a9437ea-d818-4371-907f-ef50e49fab62-etc-swift\") pod \"swift-proxy-67f6cc5479-gvvmf\" (UID: \"2a9437ea-d818-4371-907f-ef50e49fab62\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" Jan 26 11:01:06 crc kubenswrapper[5003]: E0126 11:01:06.405146 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:01:06 crc kubenswrapper[5003]: E0126 11:01:06.405204 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf: configmap "swift-ring-files" not found Jan 26 11:01:06 crc kubenswrapper[5003]: E0126 11:01:06.405338 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/2a9437ea-d818-4371-907f-ef50e49fab62-etc-swift podName:2a9437ea-d818-4371-907f-ef50e49fab62 nodeName:}" failed. 
No retries permitted until 2026-01-26 11:01:14.4053024 +0000 UTC m=+1089.946527971 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/2a9437ea-d818-4371-907f-ef50e49fab62-etc-swift") pod "swift-proxy-67f6cc5479-gvvmf" (UID: "2a9437ea-d818-4371-907f-ef50e49fab62") : configmap "swift-ring-files" not found Jan 26 11:01:06 crc kubenswrapper[5003]: I0126 11:01:06.519426 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone-cron-29490421-k962l" Jan 26 11:01:06 crc kubenswrapper[5003]: I0126 11:01:06.609378 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e90022bf-9ff0-432e-92ea-8059ae78aada-fernet-keys\") pod \"e90022bf-9ff0-432e-92ea-8059ae78aada\" (UID: \"e90022bf-9ff0-432e-92ea-8059ae78aada\") " Jan 26 11:01:06 crc kubenswrapper[5003]: I0126 11:01:06.609475 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e90022bf-9ff0-432e-92ea-8059ae78aada-config-data\") pod \"e90022bf-9ff0-432e-92ea-8059ae78aada\" (UID: \"e90022bf-9ff0-432e-92ea-8059ae78aada\") " Jan 26 11:01:06 crc kubenswrapper[5003]: I0126 11:01:06.609639 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2hnnv\" (UniqueName: \"kubernetes.io/projected/e90022bf-9ff0-432e-92ea-8059ae78aada-kube-api-access-2hnnv\") pod \"e90022bf-9ff0-432e-92ea-8059ae78aada\" (UID: \"e90022bf-9ff0-432e-92ea-8059ae78aada\") " Jan 26 11:01:06 crc kubenswrapper[5003]: I0126 11:01:06.615402 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e90022bf-9ff0-432e-92ea-8059ae78aada-kube-api-access-2hnnv" (OuterVolumeSpecName: "kube-api-access-2hnnv") pod "e90022bf-9ff0-432e-92ea-8059ae78aada" (UID: "e90022bf-9ff0-432e-92ea-8059ae78aada"). InnerVolumeSpecName "kube-api-access-2hnnv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:01:06 crc kubenswrapper[5003]: I0126 11:01:06.615924 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e90022bf-9ff0-432e-92ea-8059ae78aada-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "e90022bf-9ff0-432e-92ea-8059ae78aada" (UID: "e90022bf-9ff0-432e-92ea-8059ae78aada"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:01:06 crc kubenswrapper[5003]: I0126 11:01:06.645229 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e90022bf-9ff0-432e-92ea-8059ae78aada-config-data" (OuterVolumeSpecName: "config-data") pod "e90022bf-9ff0-432e-92ea-8059ae78aada" (UID: "e90022bf-9ff0-432e-92ea-8059ae78aada"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:01:06 crc kubenswrapper[5003]: I0126 11:01:06.711226 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2hnnv\" (UniqueName: \"kubernetes.io/projected/e90022bf-9ff0-432e-92ea-8059ae78aada-kube-api-access-2hnnv\") on node \"crc\" DevicePath \"\"" Jan 26 11:01:06 crc kubenswrapper[5003]: I0126 11:01:06.711257 5003 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e90022bf-9ff0-432e-92ea-8059ae78aada-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 26 11:01:06 crc kubenswrapper[5003]: I0126 11:01:06.711265 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e90022bf-9ff0-432e-92ea-8059ae78aada-config-data\") on node \"crc\" DevicePath \"\"" Jan 26 11:01:07 crc kubenswrapper[5003]: I0126 11:01:07.211643 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-cron-29490421-k962l" event={"ID":"e90022bf-9ff0-432e-92ea-8059ae78aada","Type":"ContainerDied","Data":"be72e1a1698e006390c260cdf2e229b7637f8af5edf04b7d08ef4d638ac25c9a"} Jan 26 11:01:07 crc kubenswrapper[5003]: I0126 11:01:07.211688 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="be72e1a1698e006390c260cdf2e229b7637f8af5edf04b7d08ef4d638ac25c9a" Jan 26 11:01:07 crc kubenswrapper[5003]: I0126 11:01:07.211717 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone-cron-29490421-k962l" Jan 26 11:01:11 crc kubenswrapper[5003]: I0126 11:01:11.242310 5003 generic.go:334] "Generic (PLEG): container finished" podID="ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e" containerID="0c027d858c4da8f5f47b15b900f9a9f7e93e78053e7a49d474bac09254c88b7b" exitCode=0 Jan 26 11:01:11 crc kubenswrapper[5003]: I0126 11:01:11.242386 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-qq2x8" event={"ID":"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e","Type":"ContainerDied","Data":"0c027d858c4da8f5f47b15b900f9a9f7e93e78053e7a49d474bac09254c88b7b"} Jan 26 11:01:12 crc kubenswrapper[5003]: I0126 11:01:12.499247 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-qq2x8" Jan 26 11:01:12 crc kubenswrapper[5003]: I0126 11:01:12.617217 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-ring-data-devices\") pod \"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e\" (UID: \"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e\") " Jan 26 11:01:12 crc kubenswrapper[5003]: I0126 11:01:12.617373 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-dispersionconf\") pod \"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e\" (UID: \"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e\") " Jan 26 11:01:12 crc kubenswrapper[5003]: I0126 11:01:12.617412 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-swiftconf\") pod \"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e\" (UID: \"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e\") " Jan 26 11:01:12 crc kubenswrapper[5003]: I0126 11:01:12.617506 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-etc-swift\") pod \"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e\" (UID: \"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e\") " Jan 26 11:01:12 crc kubenswrapper[5003]: I0126 11:01:12.617555 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jxhs6\" (UniqueName: \"kubernetes.io/projected/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-kube-api-access-jxhs6\") pod \"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e\" (UID: \"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e\") " Jan 26 11:01:12 crc kubenswrapper[5003]: I0126 11:01:12.617660 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-scripts\") pod \"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e\" (UID: \"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e\") " Jan 26 11:01:12 crc kubenswrapper[5003]: I0126 11:01:12.618581 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e" (UID: "ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:01:12 crc kubenswrapper[5003]: I0126 11:01:12.618640 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e" (UID: "ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 11:01:12 crc kubenswrapper[5003]: I0126 11:01:12.631193 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-kube-api-access-jxhs6" (OuterVolumeSpecName: "kube-api-access-jxhs6") pod "ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e" (UID: "ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e"). InnerVolumeSpecName "kube-api-access-jxhs6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:01:12 crc kubenswrapper[5003]: I0126 11:01:12.639340 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e" (UID: "ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:01:12 crc kubenswrapper[5003]: I0126 11:01:12.644255 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e" (UID: "ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:01:12 crc kubenswrapper[5003]: I0126 11:01:12.649021 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-scripts" (OuterVolumeSpecName: "scripts") pod "ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e" (UID: "ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 11:01:12 crc kubenswrapper[5003]: I0126 11:01:12.719481 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 26 11:01:12 crc kubenswrapper[5003]: I0126 11:01:12.719517 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jxhs6\" (UniqueName: \"kubernetes.io/projected/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-kube-api-access-jxhs6\") on node \"crc\" DevicePath \"\"" Jan 26 11:01:12 crc kubenswrapper[5003]: I0126 11:01:12.719528 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-scripts\") on node \"crc\" DevicePath \"\"" Jan 26 11:01:12 crc kubenswrapper[5003]: I0126 11:01:12.719536 5003 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 26 11:01:12 crc kubenswrapper[5003]: I0126 11:01:12.719545 5003 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 26 11:01:12 crc kubenswrapper[5003]: I0126 11:01:12.719553 5003 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 26 11:01:13 crc kubenswrapper[5003]: I0126 11:01:13.258172 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-qq2x8" event={"ID":"ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e","Type":"ContainerDied","Data":"d248e6d78e61c401845cbc0e40db9ea8f54f809e42cb8bcef04800ff43729b09"} Jan 26 11:01:13 crc kubenswrapper[5003]: I0126 11:01:13.258215 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d248e6d78e61c401845cbc0e40db9ea8f54f809e42cb8bcef04800ff43729b09" Jan 26 11:01:13 crc kubenswrapper[5003]: I0126 11:01:13.258253 5003 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-qq2x8" Jan 26 11:01:13 crc kubenswrapper[5003]: I0126 11:01:13.328272 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b1cd12b2-b1e1-4085-b627-52c229799294-etc-swift\") pod \"swift-storage-0\" (UID: \"b1cd12b2-b1e1-4085-b627-52c229799294\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:01:13 crc kubenswrapper[5003]: I0126 11:01:13.335281 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b1cd12b2-b1e1-4085-b627-52c229799294-etc-swift\") pod \"swift-storage-0\" (UID: \"b1cd12b2-b1e1-4085-b627-52c229799294\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:01:13 crc kubenswrapper[5003]: I0126 11:01:13.355842 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:01:13 crc kubenswrapper[5003]: I0126 11:01:13.505880 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-ring-rebalance-qq2x8_ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e/swift-ring-rebalance/0.log" Jan 26 11:01:13 crc kubenswrapper[5003]: I0126 11:01:13.840187 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Jan 26 11:01:13 crc kubenswrapper[5003]: W0126 11:01:13.849558 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb1cd12b2_b1e1_4085_b627_52c229799294.slice/crio-6c2939cb07b2a5206f72cbe482dccac648773163f192f36d2ffe8be26162d9f3 WatchSource:0}: Error finding container 6c2939cb07b2a5206f72cbe482dccac648773163f192f36d2ffe8be26162d9f3: Status 404 returned error can't find the container with id 6c2939cb07b2a5206f72cbe482dccac648773163f192f36d2ffe8be26162d9f3 Jan 26 11:01:14 crc kubenswrapper[5003]: I0126 11:01:14.266143 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"b1cd12b2-b1e1-4085-b627-52c229799294","Type":"ContainerStarted","Data":"6c2939cb07b2a5206f72cbe482dccac648773163f192f36d2ffe8be26162d9f3"} Jan 26 11:01:14 crc kubenswrapper[5003]: I0126 11:01:14.447588 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2a9437ea-d818-4371-907f-ef50e49fab62-etc-swift\") pod \"swift-proxy-67f6cc5479-gvvmf\" (UID: \"2a9437ea-d818-4371-907f-ef50e49fab62\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" Jan 26 11:01:14 crc kubenswrapper[5003]: I0126 11:01:14.457476 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2a9437ea-d818-4371-907f-ef50e49fab62-etc-swift\") pod \"swift-proxy-67f6cc5479-gvvmf\" (UID: \"2a9437ea-d818-4371-907f-ef50e49fab62\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" Jan 26 11:01:14 crc kubenswrapper[5003]: I0126 11:01:14.476478 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" Jan 26 11:01:14 crc kubenswrapper[5003]: I0126 11:01:14.883846 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf"] Jan 26 11:01:14 crc kubenswrapper[5003]: W0126 11:01:14.894530 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2a9437ea_d818_4371_907f_ef50e49fab62.slice/crio-5723774673dd0ed1b26c73df12ee771aca77fc4c06ba4b970fc03cd073aa6a97 WatchSource:0}: Error finding container 5723774673dd0ed1b26c73df12ee771aca77fc4c06ba4b970fc03cd073aa6a97: Status 404 returned error can't find the container with id 5723774673dd0ed1b26c73df12ee771aca77fc4c06ba4b970fc03cd073aa6a97 Jan 26 11:01:15 crc kubenswrapper[5003]: I0126 11:01:15.068122 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-ring-rebalance-qq2x8_ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e/swift-ring-rebalance/0.log" Jan 26 11:01:15 crc kubenswrapper[5003]: I0126 11:01:15.273446 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" event={"ID":"2a9437ea-d818-4371-907f-ef50e49fab62","Type":"ContainerStarted","Data":"5723774673dd0ed1b26c73df12ee771aca77fc4c06ba4b970fc03cd073aa6a97"} Jan 26 11:01:16 crc kubenswrapper[5003]: I0126 11:01:16.282029 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" event={"ID":"2a9437ea-d818-4371-907f-ef50e49fab62","Type":"ContainerStarted","Data":"388624f524767eac6b63acfd806c460011a8d4796ef7c899b8d2ca4cc7b5529d"} Jan 26 11:01:16 crc kubenswrapper[5003]: I0126 11:01:16.682172 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-ring-rebalance-qq2x8_ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e/swift-ring-rebalance/0.log" Jan 26 11:01:17 crc kubenswrapper[5003]: I0126 11:01:17.308407 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" event={"ID":"2a9437ea-d818-4371-907f-ef50e49fab62","Type":"ContainerStarted","Data":"301129a4469083c1cbfc055f4d68e951c799bf81380e7634dfcfb7e1b44f082d"} Jan 26 11:01:17 crc kubenswrapper[5003]: I0126 11:01:17.308878 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" Jan 26 11:01:17 crc kubenswrapper[5003]: I0126 11:01:17.308984 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" Jan 26 11:01:17 crc kubenswrapper[5003]: I0126 11:01:17.312540 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"b1cd12b2-b1e1-4085-b627-52c229799294","Type":"ContainerStarted","Data":"eae01b5370d1e228605d370f34df9fa397a7d68cf59e29307321708ca0b44d64"} Jan 26 11:01:17 crc kubenswrapper[5003]: I0126 11:01:17.312600 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"b1cd12b2-b1e1-4085-b627-52c229799294","Type":"ContainerStarted","Data":"dd1fb65813942bf400b95e20d6ad2d0c12a6094c8721f5e09ce6ed2eca47bbd1"} Jan 26 11:01:17 crc kubenswrapper[5003]: I0126 11:01:17.332040 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" podStartSLOduration=19.332012791 podStartE2EDuration="19.332012791s" podCreationTimestamp="2026-01-26 11:00:58 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 11:01:17.325983538 +0000 UTC m=+1092.867209109" watchObservedRunningTime="2026-01-26 11:01:17.332012791 +0000 UTC m=+1092.873238352" Jan 26 11:01:18 crc kubenswrapper[5003]: I0126 11:01:18.312731 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-ring-rebalance-qq2x8_ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e/swift-ring-rebalance/0.log" Jan 26 11:01:18 crc kubenswrapper[5003]: I0126 11:01:18.322092 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"b1cd12b2-b1e1-4085-b627-52c229799294","Type":"ContainerStarted","Data":"4490066eb9bd6c66b9b0208990bd8d6ddb6b95928de76266ff83dabcfbf84d8e"} Jan 26 11:01:18 crc kubenswrapper[5003]: I0126 11:01:18.322142 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"b1cd12b2-b1e1-4085-b627-52c229799294","Type":"ContainerStarted","Data":"a64c4272241480892dfe3f2278ada32e293df4c15d3e4af914b9b432ee3fcac1"} Jan 26 11:01:19 crc kubenswrapper[5003]: I0126 11:01:19.332988 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"b1cd12b2-b1e1-4085-b627-52c229799294","Type":"ContainerStarted","Data":"1265d9b3a3feebe021921e531bdb66eae7dd896fa15b34ab8198f0a228a053af"} Jan 26 11:01:19 crc kubenswrapper[5003]: I0126 11:01:19.333454 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"b1cd12b2-b1e1-4085-b627-52c229799294","Type":"ContainerStarted","Data":"f8ead6df5ff9818293ee63a8f4549cf26db872cb3414f924dadf1e20d52850b6"} Jan 26 11:01:19 crc kubenswrapper[5003]: I0126 11:01:19.910999 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-ring-rebalance-qq2x8_ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e/swift-ring-rebalance/0.log" Jan 26 11:01:20 crc kubenswrapper[5003]: I0126 11:01:20.359364 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"b1cd12b2-b1e1-4085-b627-52c229799294","Type":"ContainerStarted","Data":"88d289c8c29cef995cba467c3ef015fe579c965aa3a27e317ec0952f33134666"} Jan 26 11:01:20 crc kubenswrapper[5003]: I0126 11:01:20.359408 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"b1cd12b2-b1e1-4085-b627-52c229799294","Type":"ContainerStarted","Data":"242c4b5df0783c40de8c8d50308ef656e8bc43b2c49800e6e0f32a0c9d3b5548"} Jan 26 11:01:21 crc kubenswrapper[5003]: I0126 11:01:21.370928 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"b1cd12b2-b1e1-4085-b627-52c229799294","Type":"ContainerStarted","Data":"59cbbeca4e455551fd4d58e02cd043424dc43dbb708f812dc323404c1a202446"} Jan 26 11:01:21 crc kubenswrapper[5003]: I0126 11:01:21.371352 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"b1cd12b2-b1e1-4085-b627-52c229799294","Type":"ContainerStarted","Data":"7b2534758d832bcd1a530711efe81788030cd894bec690d03b6191096a789a05"} Jan 26 11:01:21 crc kubenswrapper[5003]: I0126 11:01:21.371364 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"b1cd12b2-b1e1-4085-b627-52c229799294","Type":"ContainerStarted","Data":"14e62a22aa945c458ae016769350cba7c01a838b85299573d468b2fa1cef07cd"} Jan 26 11:01:21 crc 
kubenswrapper[5003]: I0126 11:01:21.474161 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-ring-rebalance-qq2x8_ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e/swift-ring-rebalance/0.log" Jan 26 11:01:22 crc kubenswrapper[5003]: I0126 11:01:22.387877 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"b1cd12b2-b1e1-4085-b627-52c229799294","Type":"ContainerStarted","Data":"1f052a09e3da171b6ed8c264f5462cebfaf9cc8cd6e4d643c4676c1c3c0f0bf3"} Jan 26 11:01:22 crc kubenswrapper[5003]: I0126 11:01:22.388437 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"b1cd12b2-b1e1-4085-b627-52c229799294","Type":"ContainerStarted","Data":"aa8f9aadbc5e77757c5abbb131c7c069a35d27f56fe76f934fca1651e6618705"} Jan 26 11:01:22 crc kubenswrapper[5003]: I0126 11:01:22.388456 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"b1cd12b2-b1e1-4085-b627-52c229799294","Type":"ContainerStarted","Data":"26114010e05fab42d39367ece8f172ab8c229ebdd45980ebbe883834403c5680"} Jan 26 11:01:23 crc kubenswrapper[5003]: I0126 11:01:23.140583 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-ring-rebalance-qq2x8_ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e/swift-ring-rebalance/0.log" Jan 26 11:01:23 crc kubenswrapper[5003]: I0126 11:01:23.403769 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"b1cd12b2-b1e1-4085-b627-52c229799294","Type":"ContainerStarted","Data":"5fe0c6d4431b856fe4436d168ee7e1e5d4399d39c3a6c76037edb9e8ab19d93d"} Jan 26 11:01:23 crc kubenswrapper[5003]: I0126 11:01:23.439942 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-storage-0" podStartSLOduration=20.455410257 podStartE2EDuration="27.439924332s" podCreationTimestamp="2026-01-26 11:00:56 +0000 UTC" firstStartedPulling="2026-01-26 11:01:13.852613987 +0000 UTC m=+1089.393839548" lastFinishedPulling="2026-01-26 11:01:20.837128062 +0000 UTC m=+1096.378353623" observedRunningTime="2026-01-26 11:01:23.4330324 +0000 UTC m=+1098.974257971" watchObservedRunningTime="2026-01-26 11:01:23.439924332 +0000 UTC m=+1098.981149893" Jan 26 11:01:24 crc kubenswrapper[5003]: I0126 11:01:24.479455 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" Jan 26 11:01:24 crc kubenswrapper[5003]: I0126 11:01:24.479778 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" Jan 26 11:01:24 crc kubenswrapper[5003]: I0126 11:01:24.731044 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-ring-rebalance-qq2x8_ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e/swift-ring-rebalance/0.log" Jan 26 11:01:26 crc kubenswrapper[5003]: I0126 11:01:26.338094 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-ring-rebalance-qq2x8_ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e/swift-ring-rebalance/0.log" Jan 26 11:01:27 crc kubenswrapper[5003]: I0126 11:01:27.765069 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-storage-2"] Jan 26 11:01:27 crc kubenswrapper[5003]: E0126 11:01:27.766748 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e" containerName="swift-ring-rebalance" Jan 26 11:01:27 crc 
kubenswrapper[5003]: I0126 11:01:27.766871 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e" containerName="swift-ring-rebalance" Jan 26 11:01:27 crc kubenswrapper[5003]: E0126 11:01:27.766970 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e90022bf-9ff0-432e-92ea-8059ae78aada" containerName="keystone-cron" Jan 26 11:01:27 crc kubenswrapper[5003]: I0126 11:01:27.767062 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e90022bf-9ff0-432e-92ea-8059ae78aada" containerName="keystone-cron" Jan 26 11:01:27 crc kubenswrapper[5003]: I0126 11:01:27.767378 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e" containerName="swift-ring-rebalance" Jan 26 11:01:27 crc kubenswrapper[5003]: I0126 11:01:27.767513 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e90022bf-9ff0-432e-92ea-8059ae78aada" containerName="keystone-cron" Jan 26 11:01:27 crc kubenswrapper[5003]: I0126 11:01:27.772707 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:01:27 crc kubenswrapper[5003]: I0126 11:01:27.780074 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-storage-1"] Jan 26 11:01:27 crc kubenswrapper[5003]: I0126 11:01:27.792717 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-2"] Jan 26 11:01:27 crc kubenswrapper[5003]: I0126 11:01:27.792866 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:01:27 crc kubenswrapper[5003]: I0126 11:01:27.823823 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-1"] Jan 26 11:01:27 crc kubenswrapper[5003]: I0126 11:01:27.916155 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/980cff26-19b2-457a-a90f-b6acec8de879-etc-swift\") pod \"swift-storage-1\" (UID: \"980cff26-19b2-457a-a90f-b6acec8de879\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:01:27 crc kubenswrapper[5003]: I0126 11:01:27.916242 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f16c3bea-96a9-4034-8848-de4d7bbcb6b5-etc-swift\") pod \"swift-storage-2\" (UID: \"f16c3bea-96a9-4034-8848-de4d7bbcb6b5\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:01:27 crc kubenswrapper[5003]: I0126 11:01:27.916306 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/980cff26-19b2-457a-a90f-b6acec8de879-lock\") pod \"swift-storage-1\" (UID: \"980cff26-19b2-457a-a90f-b6acec8de879\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:01:27 crc kubenswrapper[5003]: I0126 11:01:27.916339 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkcg6\" (UniqueName: \"kubernetes.io/projected/980cff26-19b2-457a-a90f-b6acec8de879-kube-api-access-vkcg6\") pod \"swift-storage-1\" (UID: \"980cff26-19b2-457a-a90f-b6acec8de879\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:01:27 crc kubenswrapper[5003]: I0126 11:01:27.916375 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: 
\"kubernetes.io/empty-dir/f16c3bea-96a9-4034-8848-de4d7bbcb6b5-lock\") pod \"swift-storage-2\" (UID: \"f16c3bea-96a9-4034-8848-de4d7bbcb6b5\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:01:27 crc kubenswrapper[5003]: I0126 11:01:27.916463 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-znxs4\" (UniqueName: \"kubernetes.io/projected/f16c3bea-96a9-4034-8848-de4d7bbcb6b5-kube-api-access-znxs4\") pod \"swift-storage-2\" (UID: \"f16c3bea-96a9-4034-8848-de4d7bbcb6b5\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:01:27 crc kubenswrapper[5003]: I0126 11:01:27.916520 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/f16c3bea-96a9-4034-8848-de4d7bbcb6b5-cache\") pod \"swift-storage-2\" (UID: \"f16c3bea-96a9-4034-8848-de4d7bbcb6b5\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:01:27 crc kubenswrapper[5003]: I0126 11:01:27.916536 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/980cff26-19b2-457a-a90f-b6acec8de879-cache\") pod \"swift-storage-1\" (UID: \"980cff26-19b2-457a-a90f-b6acec8de879\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:01:27 crc kubenswrapper[5003]: I0126 11:01:27.916600 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"swift-storage-1\" (UID: \"980cff26-19b2-457a-a90f-b6acec8de879\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:01:27 crc kubenswrapper[5003]: I0126 11:01:27.916618 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-2\" (UID: \"f16c3bea-96a9-4034-8848-de4d7bbcb6b5\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.017996 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/980cff26-19b2-457a-a90f-b6acec8de879-cache\") pod \"swift-storage-1\" (UID: \"980cff26-19b2-457a-a90f-b6acec8de879\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.018083 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"swift-storage-1\" (UID: \"980cff26-19b2-457a-a90f-b6acec8de879\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.018101 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-2\" (UID: \"f16c3bea-96a9-4034-8848-de4d7bbcb6b5\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.018128 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/980cff26-19b2-457a-a90f-b6acec8de879-etc-swift\") pod \"swift-storage-1\" (UID: \"980cff26-19b2-457a-a90f-b6acec8de879\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.018154 5003 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f16c3bea-96a9-4034-8848-de4d7bbcb6b5-etc-swift\") pod \"swift-storage-2\" (UID: \"f16c3bea-96a9-4034-8848-de4d7bbcb6b5\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.018168 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/980cff26-19b2-457a-a90f-b6acec8de879-lock\") pod \"swift-storage-1\" (UID: \"980cff26-19b2-457a-a90f-b6acec8de879\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.018190 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkcg6\" (UniqueName: \"kubernetes.io/projected/980cff26-19b2-457a-a90f-b6acec8de879-kube-api-access-vkcg6\") pod \"swift-storage-1\" (UID: \"980cff26-19b2-457a-a90f-b6acec8de879\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.018222 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/f16c3bea-96a9-4034-8848-de4d7bbcb6b5-lock\") pod \"swift-storage-2\" (UID: \"f16c3bea-96a9-4034-8848-de4d7bbcb6b5\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.018261 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-znxs4\" (UniqueName: \"kubernetes.io/projected/f16c3bea-96a9-4034-8848-de4d7bbcb6b5-kube-api-access-znxs4\") pod \"swift-storage-2\" (UID: \"f16c3bea-96a9-4034-8848-de4d7bbcb6b5\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.018667 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-2\" (UID: \"f16c3bea-96a9-4034-8848-de4d7bbcb6b5\") device mount path \"/mnt/openstack/pv09\"" pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.018860 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"swift-storage-1\" (UID: \"980cff26-19b2-457a-a90f-b6acec8de879\") device mount path \"/mnt/openstack/pv05\"" pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.018898 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/f16c3bea-96a9-4034-8848-de4d7bbcb6b5-cache\") pod \"swift-storage-2\" (UID: \"f16c3bea-96a9-4034-8848-de4d7bbcb6b5\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.018952 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/980cff26-19b2-457a-a90f-b6acec8de879-cache\") pod \"swift-storage-1\" (UID: \"980cff26-19b2-457a-a90f-b6acec8de879\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.019440 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/f16c3bea-96a9-4034-8848-de4d7bbcb6b5-lock\") pod \"swift-storage-2\" (UID: \"f16c3bea-96a9-4034-8848-de4d7bbcb6b5\") " 
pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.019508 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/f16c3bea-96a9-4034-8848-de4d7bbcb6b5-cache\") pod \"swift-storage-2\" (UID: \"f16c3bea-96a9-4034-8848-de4d7bbcb6b5\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.019972 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/980cff26-19b2-457a-a90f-b6acec8de879-lock\") pod \"swift-storage-1\" (UID: \"980cff26-19b2-457a-a90f-b6acec8de879\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.028107 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/980cff26-19b2-457a-a90f-b6acec8de879-etc-swift\") pod \"swift-storage-1\" (UID: \"980cff26-19b2-457a-a90f-b6acec8de879\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.028309 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f16c3bea-96a9-4034-8848-de4d7bbcb6b5-etc-swift\") pod \"swift-storage-2\" (UID: \"f16c3bea-96a9-4034-8848-de4d7bbcb6b5\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.036655 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vkcg6\" (UniqueName: \"kubernetes.io/projected/980cff26-19b2-457a-a90f-b6acec8de879-kube-api-access-vkcg6\") pod \"swift-storage-1\" (UID: \"980cff26-19b2-457a-a90f-b6acec8de879\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.045231 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-znxs4\" (UniqueName: \"kubernetes.io/projected/f16c3bea-96a9-4034-8848-de4d7bbcb6b5-kube-api-access-znxs4\") pod \"swift-storage-2\" (UID: \"f16c3bea-96a9-4034-8848-de4d7bbcb6b5\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.046337 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-2\" (UID: \"f16c3bea-96a9-4034-8848-de4d7bbcb6b5\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.065021 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"swift-storage-1\" (UID: \"980cff26-19b2-457a-a90f-b6acec8de879\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.095255 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.120907 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.541539 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-2"] Jan 26 11:01:28 crc kubenswrapper[5003]: W0126 11:01:28.545604 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf16c3bea_96a9_4034_8848_de4d7bbcb6b5.slice/crio-5a4eb25959254562739801e0f898d0c4f367dd7f9bd2b17c11f2ce2d39703966 WatchSource:0}: Error finding container 5a4eb25959254562739801e0f898d0c4f367dd7f9bd2b17c11f2ce2d39703966: Status 404 returned error can't find the container with id 5a4eb25959254562739801e0f898d0c4f367dd7f9bd2b17c11f2ce2d39703966 Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.644260 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-1"] Jan 26 11:01:28 crc kubenswrapper[5003]: W0126 11:01:28.648624 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod980cff26_19b2_457a_a90f_b6acec8de879.slice/crio-272993c17960c33ffdf6d1529a81a1a7b0f9bcc8cdf73b78636648c1e33eaaad WatchSource:0}: Error finding container 272993c17960c33ffdf6d1529a81a1a7b0f9bcc8cdf73b78636648c1e33eaaad: Status 404 returned error can't find the container with id 272993c17960c33ffdf6d1529a81a1a7b0f9bcc8cdf73b78636648c1e33eaaad Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.830765 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-qq2x8"] Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.839313 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-qq2x8"] Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.854824 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-nnj57"] Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.855777 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-nnj57" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.859365 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-config-data" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.859998 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-scripts" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.886550 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-nnj57"] Jan 26 11:01:28 crc kubenswrapper[5003]: E0126 11:01:28.891152 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[dispersionconf etc-swift kube-api-access-7l9vv ring-data-devices scripts swiftconf], unattached volumes=[], failed to process volumes=[]: context canceled" pod="swift-kuttl-tests/swift-ring-rebalance-nnj57" podUID="e8cfd216-c98d-4595-9c66-b33042952c93" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.921490 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-6vcpn"] Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.922407 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-6vcpn" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.935921 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/e8cfd216-c98d-4595-9c66-b33042952c93-dispersionconf\") pod \"swift-ring-rebalance-nnj57\" (UID: \"e8cfd216-c98d-4595-9c66-b33042952c93\") " pod="swift-kuttl-tests/swift-ring-rebalance-nnj57" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.935986 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7l9vv\" (UniqueName: \"kubernetes.io/projected/e8cfd216-c98d-4595-9c66-b33042952c93-kube-api-access-7l9vv\") pod \"swift-ring-rebalance-nnj57\" (UID: \"e8cfd216-c98d-4595-9c66-b33042952c93\") " pod="swift-kuttl-tests/swift-ring-rebalance-nnj57" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.936027 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/e8cfd216-c98d-4595-9c66-b33042952c93-swiftconf\") pod \"swift-ring-rebalance-nnj57\" (UID: \"e8cfd216-c98d-4595-9c66-b33042952c93\") " pod="swift-kuttl-tests/swift-ring-rebalance-nnj57" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.936072 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/e8cfd216-c98d-4595-9c66-b33042952c93-ring-data-devices\") pod \"swift-ring-rebalance-nnj57\" (UID: \"e8cfd216-c98d-4595-9c66-b33042952c93\") " pod="swift-kuttl-tests/swift-ring-rebalance-nnj57" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.936112 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/e8cfd216-c98d-4595-9c66-b33042952c93-etc-swift\") pod \"swift-ring-rebalance-nnj57\" (UID: \"e8cfd216-c98d-4595-9c66-b33042952c93\") " pod="swift-kuttl-tests/swift-ring-rebalance-nnj57" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.936179 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e8cfd216-c98d-4595-9c66-b33042952c93-scripts\") pod \"swift-ring-rebalance-nnj57\" (UID: \"e8cfd216-c98d-4595-9c66-b33042952c93\") " pod="swift-kuttl-tests/swift-ring-rebalance-nnj57" Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.948902 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-6vcpn"] Jan 26 11:01:28 crc kubenswrapper[5003]: I0126 11:01:28.955805 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-nnj57"] Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.022441 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e" path="/var/lib/kubelet/pods/ee34f03c-51bf-4cc8-b6d3-bd4c49f70f0e/volumes" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.037294 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e8cfd216-c98d-4595-9c66-b33042952c93-scripts\") pod \"swift-ring-rebalance-nnj57\" (UID: \"e8cfd216-c98d-4595-9c66-b33042952c93\") " pod="swift-kuttl-tests/swift-ring-rebalance-nnj57" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 
11:01:29.037355 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/e8cfd216-c98d-4595-9c66-b33042952c93-dispersionconf\") pod \"swift-ring-rebalance-nnj57\" (UID: \"e8cfd216-c98d-4595-9c66-b33042952c93\") " pod="swift-kuttl-tests/swift-ring-rebalance-nnj57" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.037387 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7l9vv\" (UniqueName: \"kubernetes.io/projected/e8cfd216-c98d-4595-9c66-b33042952c93-kube-api-access-7l9vv\") pod \"swift-ring-rebalance-nnj57\" (UID: \"e8cfd216-c98d-4595-9c66-b33042952c93\") " pod="swift-kuttl-tests/swift-ring-rebalance-nnj57" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.037414 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/e8cfd216-c98d-4595-9c66-b33042952c93-swiftconf\") pod \"swift-ring-rebalance-nnj57\" (UID: \"e8cfd216-c98d-4595-9c66-b33042952c93\") " pod="swift-kuttl-tests/swift-ring-rebalance-nnj57" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.037436 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-etc-swift\") pod \"swift-ring-rebalance-6vcpn\" (UID: \"a0556bc9-3e95-49cb-a5f3-38cd61fbf173\") " pod="swift-kuttl-tests/swift-ring-rebalance-6vcpn" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.037457 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-swiftconf\") pod \"swift-ring-rebalance-6vcpn\" (UID: \"a0556bc9-3e95-49cb-a5f3-38cd61fbf173\") " pod="swift-kuttl-tests/swift-ring-rebalance-6vcpn" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.037474 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ml97v\" (UniqueName: \"kubernetes.io/projected/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-kube-api-access-ml97v\") pod \"swift-ring-rebalance-6vcpn\" (UID: \"a0556bc9-3e95-49cb-a5f3-38cd61fbf173\") " pod="swift-kuttl-tests/swift-ring-rebalance-6vcpn" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.037580 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/e8cfd216-c98d-4595-9c66-b33042952c93-ring-data-devices\") pod \"swift-ring-rebalance-nnj57\" (UID: \"e8cfd216-c98d-4595-9c66-b33042952c93\") " pod="swift-kuttl-tests/swift-ring-rebalance-nnj57" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.037632 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-scripts\") pod \"swift-ring-rebalance-6vcpn\" (UID: \"a0556bc9-3e95-49cb-a5f3-38cd61fbf173\") " pod="swift-kuttl-tests/swift-ring-rebalance-6vcpn" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.037663 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/e8cfd216-c98d-4595-9c66-b33042952c93-etc-swift\") pod \"swift-ring-rebalance-nnj57\" (UID: \"e8cfd216-c98d-4595-9c66-b33042952c93\") " pod="swift-kuttl-tests/swift-ring-rebalance-nnj57" Jan 26 11:01:29 
crc kubenswrapper[5003]: I0126 11:01:29.037774 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-ring-data-devices\") pod \"swift-ring-rebalance-6vcpn\" (UID: \"a0556bc9-3e95-49cb-a5f3-38cd61fbf173\") " pod="swift-kuttl-tests/swift-ring-rebalance-6vcpn" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.037816 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-dispersionconf\") pod \"swift-ring-rebalance-6vcpn\" (UID: \"a0556bc9-3e95-49cb-a5f3-38cd61fbf173\") " pod="swift-kuttl-tests/swift-ring-rebalance-6vcpn" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.038026 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/e8cfd216-c98d-4595-9c66-b33042952c93-etc-swift\") pod \"swift-ring-rebalance-nnj57\" (UID: \"e8cfd216-c98d-4595-9c66-b33042952c93\") " pod="swift-kuttl-tests/swift-ring-rebalance-nnj57" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.038092 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e8cfd216-c98d-4595-9c66-b33042952c93-scripts\") pod \"swift-ring-rebalance-nnj57\" (UID: \"e8cfd216-c98d-4595-9c66-b33042952c93\") " pod="swift-kuttl-tests/swift-ring-rebalance-nnj57" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.038308 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/e8cfd216-c98d-4595-9c66-b33042952c93-ring-data-devices\") pod \"swift-ring-rebalance-nnj57\" (UID: \"e8cfd216-c98d-4595-9c66-b33042952c93\") " pod="swift-kuttl-tests/swift-ring-rebalance-nnj57" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.045088 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/e8cfd216-c98d-4595-9c66-b33042952c93-dispersionconf\") pod \"swift-ring-rebalance-nnj57\" (UID: \"e8cfd216-c98d-4595-9c66-b33042952c93\") " pod="swift-kuttl-tests/swift-ring-rebalance-nnj57" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.048187 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/e8cfd216-c98d-4595-9c66-b33042952c93-swiftconf\") pod \"swift-ring-rebalance-nnj57\" (UID: \"e8cfd216-c98d-4595-9c66-b33042952c93\") " pod="swift-kuttl-tests/swift-ring-rebalance-nnj57" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.056704 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7l9vv\" (UniqueName: \"kubernetes.io/projected/e8cfd216-c98d-4595-9c66-b33042952c93-kube-api-access-7l9vv\") pod \"swift-ring-rebalance-nnj57\" (UID: \"e8cfd216-c98d-4595-9c66-b33042952c93\") " pod="swift-kuttl-tests/swift-ring-rebalance-nnj57" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.140085 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-etc-swift\") pod \"swift-ring-rebalance-6vcpn\" (UID: \"a0556bc9-3e95-49cb-a5f3-38cd61fbf173\") " pod="swift-kuttl-tests/swift-ring-rebalance-6vcpn" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.140160 5003 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-swiftconf\") pod \"swift-ring-rebalance-6vcpn\" (UID: \"a0556bc9-3e95-49cb-a5f3-38cd61fbf173\") " pod="swift-kuttl-tests/swift-ring-rebalance-6vcpn" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.140183 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ml97v\" (UniqueName: \"kubernetes.io/projected/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-kube-api-access-ml97v\") pod \"swift-ring-rebalance-6vcpn\" (UID: \"a0556bc9-3e95-49cb-a5f3-38cd61fbf173\") " pod="swift-kuttl-tests/swift-ring-rebalance-6vcpn" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.140337 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-scripts\") pod \"swift-ring-rebalance-6vcpn\" (UID: \"a0556bc9-3e95-49cb-a5f3-38cd61fbf173\") " pod="swift-kuttl-tests/swift-ring-rebalance-6vcpn" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.140688 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-ring-data-devices\") pod \"swift-ring-rebalance-6vcpn\" (UID: \"a0556bc9-3e95-49cb-a5f3-38cd61fbf173\") " pod="swift-kuttl-tests/swift-ring-rebalance-6vcpn" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.140761 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-dispersionconf\") pod \"swift-ring-rebalance-6vcpn\" (UID: \"a0556bc9-3e95-49cb-a5f3-38cd61fbf173\") " pod="swift-kuttl-tests/swift-ring-rebalance-6vcpn" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.142488 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-scripts\") pod \"swift-ring-rebalance-6vcpn\" (UID: \"a0556bc9-3e95-49cb-a5f3-38cd61fbf173\") " pod="swift-kuttl-tests/swift-ring-rebalance-6vcpn" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.143077 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-etc-swift\") pod \"swift-ring-rebalance-6vcpn\" (UID: \"a0556bc9-3e95-49cb-a5f3-38cd61fbf173\") " pod="swift-kuttl-tests/swift-ring-rebalance-6vcpn" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.144465 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-ring-data-devices\") pod \"swift-ring-rebalance-6vcpn\" (UID: \"a0556bc9-3e95-49cb-a5f3-38cd61fbf173\") " pod="swift-kuttl-tests/swift-ring-rebalance-6vcpn" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.146946 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-dispersionconf\") pod \"swift-ring-rebalance-6vcpn\" (UID: \"a0556bc9-3e95-49cb-a5f3-38cd61fbf173\") " pod="swift-kuttl-tests/swift-ring-rebalance-6vcpn" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.163878 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"swiftconf\" (UniqueName: \"kubernetes.io/secret/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-swiftconf\") pod \"swift-ring-rebalance-6vcpn\" (UID: \"a0556bc9-3e95-49cb-a5f3-38cd61fbf173\") " pod="swift-kuttl-tests/swift-ring-rebalance-6vcpn" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.171621 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ml97v\" (UniqueName: \"kubernetes.io/projected/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-kube-api-access-ml97v\") pod \"swift-ring-rebalance-6vcpn\" (UID: \"a0556bc9-3e95-49cb-a5f3-38cd61fbf173\") " pod="swift-kuttl-tests/swift-ring-rebalance-6vcpn" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.271798 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-6vcpn" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.552239 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"980cff26-19b2-457a-a90f-b6acec8de879","Type":"ContainerStarted","Data":"d0ee0f1ae0834b8c92fa8008b131bef63fb72cec700023820e98184dec9e6aad"} Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.552309 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"980cff26-19b2-457a-a90f-b6acec8de879","Type":"ContainerStarted","Data":"40f68e661a9dd524e00f97a9724bddfe1020d8ccf737c7184461d1bc029d39cd"} Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.552320 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"980cff26-19b2-457a-a90f-b6acec8de879","Type":"ContainerStarted","Data":"2fc7a4d553254bcfd70a6603c24123f5313042a2a942cd5c7ec6a56de544854a"} Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.552329 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"980cff26-19b2-457a-a90f-b6acec8de879","Type":"ContainerStarted","Data":"272993c17960c33ffdf6d1529a81a1a7b0f9bcc8cdf73b78636648c1e33eaaad"} Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.554345 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-nnj57" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.554850 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f16c3bea-96a9-4034-8848-de4d7bbcb6b5","Type":"ContainerStarted","Data":"b0a8a7d412bfe444befc54bd057301ba0b92fe7537adec15f35659f2c2a6262e"} Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.554871 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f16c3bea-96a9-4034-8848-de4d7bbcb6b5","Type":"ContainerStarted","Data":"828905524a607e4481f9f56be6aea9ed539eef768da3cb769bba5d4bd9185a41"} Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.554881 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f16c3bea-96a9-4034-8848-de4d7bbcb6b5","Type":"ContainerStarted","Data":"c5b0c6029e6937dc34433353c875e403332f61fea1a6089932ce0b2f9e2231cb"} Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.554891 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f16c3bea-96a9-4034-8848-de4d7bbcb6b5","Type":"ContainerStarted","Data":"6cf23814347ea3b8ff01a79b40bc34c3613bfc514f28d734fc77c3fb9f66bd7c"} Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.554900 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f16c3bea-96a9-4034-8848-de4d7bbcb6b5","Type":"ContainerStarted","Data":"5a4eb25959254562739801e0f898d0c4f367dd7f9bd2b17c11f2ce2d39703966"} Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.620620 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-nnj57" Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.771923 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/e8cfd216-c98d-4595-9c66-b33042952c93-ring-data-devices\") pod \"e8cfd216-c98d-4595-9c66-b33042952c93\" (UID: \"e8cfd216-c98d-4595-9c66-b33042952c93\") " Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.772365 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/e8cfd216-c98d-4595-9c66-b33042952c93-dispersionconf\") pod \"e8cfd216-c98d-4595-9c66-b33042952c93\" (UID: \"e8cfd216-c98d-4595-9c66-b33042952c93\") " Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.772428 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/e8cfd216-c98d-4595-9c66-b33042952c93-etc-swift\") pod \"e8cfd216-c98d-4595-9c66-b33042952c93\" (UID: \"e8cfd216-c98d-4595-9c66-b33042952c93\") " Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.772501 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/e8cfd216-c98d-4595-9c66-b33042952c93-swiftconf\") pod \"e8cfd216-c98d-4595-9c66-b33042952c93\" (UID: \"e8cfd216-c98d-4595-9c66-b33042952c93\") " Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.772559 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e8cfd216-c98d-4595-9c66-b33042952c93-scripts\") pod \"e8cfd216-c98d-4595-9c66-b33042952c93\" (UID: \"e8cfd216-c98d-4595-9c66-b33042952c93\") " 
Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.772640 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7l9vv\" (UniqueName: \"kubernetes.io/projected/e8cfd216-c98d-4595-9c66-b33042952c93-kube-api-access-7l9vv\") pod \"e8cfd216-c98d-4595-9c66-b33042952c93\" (UID: \"e8cfd216-c98d-4595-9c66-b33042952c93\") "
Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.775052 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e8cfd216-c98d-4595-9c66-b33042952c93-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "e8cfd216-c98d-4595-9c66-b33042952c93" (UID: "e8cfd216-c98d-4595-9c66-b33042952c93"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.775329 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8cfd216-c98d-4595-9c66-b33042952c93-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "e8cfd216-c98d-4595-9c66-b33042952c93" (UID: "e8cfd216-c98d-4595-9c66-b33042952c93"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.779922 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8cfd216-c98d-4595-9c66-b33042952c93-scripts" (OuterVolumeSpecName: "scripts") pod "e8cfd216-c98d-4595-9c66-b33042952c93" (UID: "e8cfd216-c98d-4595-9c66-b33042952c93"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.781579 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8cfd216-c98d-4595-9c66-b33042952c93-kube-api-access-7l9vv" (OuterVolumeSpecName: "kube-api-access-7l9vv") pod "e8cfd216-c98d-4595-9c66-b33042952c93" (UID: "e8cfd216-c98d-4595-9c66-b33042952c93"). InnerVolumeSpecName "kube-api-access-7l9vv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.782154 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8cfd216-c98d-4595-9c66-b33042952c93-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "e8cfd216-c98d-4595-9c66-b33042952c93" (UID: "e8cfd216-c98d-4595-9c66-b33042952c93"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.807587 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8cfd216-c98d-4595-9c66-b33042952c93-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "e8cfd216-c98d-4595-9c66-b33042952c93" (UID: "e8cfd216-c98d-4595-9c66-b33042952c93"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.875092 5003 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/e8cfd216-c98d-4595-9c66-b33042952c93-swiftconf\") on node \"crc\" DevicePath \"\""
Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.875133 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e8cfd216-c98d-4595-9c66-b33042952c93-scripts\") on node \"crc\" DevicePath \"\""
Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.875148 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7l9vv\" (UniqueName: \"kubernetes.io/projected/e8cfd216-c98d-4595-9c66-b33042952c93-kube-api-access-7l9vv\") on node \"crc\" DevicePath \"\""
Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.875164 5003 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/e8cfd216-c98d-4595-9c66-b33042952c93-ring-data-devices\") on node \"crc\" DevicePath \"\""
Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.875178 5003 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/e8cfd216-c98d-4595-9c66-b33042952c93-dispersionconf\") on node \"crc\" DevicePath \"\""
Jan 26 11:01:29 crc kubenswrapper[5003]: I0126 11:01:29.875189 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/e8cfd216-c98d-4595-9c66-b33042952c93-etc-swift\") on node \"crc\" DevicePath \"\""
Jan 26 11:01:30 crc kubenswrapper[5003]: I0126 11:01:30.231555 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-6vcpn"]
Jan 26 11:01:30 crc kubenswrapper[5003]: I0126 11:01:30.566420 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-6vcpn" event={"ID":"a0556bc9-3e95-49cb-a5f3-38cd61fbf173","Type":"ContainerStarted","Data":"4850a98216af13b384119f4d77428a57c0245a54d33a10ae3860b9b112a25a19"}
Jan 26 11:01:30 crc kubenswrapper[5003]: I0126 11:01:30.571081 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f16c3bea-96a9-4034-8848-de4d7bbcb6b5","Type":"ContainerStarted","Data":"cf124e879cc2c0f0ae011f0c34385d51444c59a3492a0303e60d8161cbc9321e"}
Jan 26 11:01:30 crc kubenswrapper[5003]: I0126 11:01:30.571123 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f16c3bea-96a9-4034-8848-de4d7bbcb6b5","Type":"ContainerStarted","Data":"4b5e0284abaaa6fc5f1745a23ed3352066d05ab79c3c7c044132fffd49069085"}
Jan 26 11:01:30 crc kubenswrapper[5003]: I0126 11:01:30.571135 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f16c3bea-96a9-4034-8848-de4d7bbcb6b5","Type":"ContainerStarted","Data":"223f2d4605898f119dbf0429c94380cc06f6771657025bb95012a8194c5c6c8c"}
Jan 26 11:01:30 crc kubenswrapper[5003]: I0126 11:01:30.579961 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-nnj57"
Jan 26 11:01:30 crc kubenswrapper[5003]: I0126 11:01:30.580357 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"980cff26-19b2-457a-a90f-b6acec8de879","Type":"ContainerStarted","Data":"5cdaea9b636f11d0fb48e590deae50e2c4e15c0c8f5d4c656019ede0b2b44a63"}
Jan 26 11:01:30 crc kubenswrapper[5003]: I0126 11:01:30.580402 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"980cff26-19b2-457a-a90f-b6acec8de879","Type":"ContainerStarted","Data":"5b84314763903eda18ff9726b3817ee45dc85ecaf53a0756d21a03cb7724fd72"}
Jan 26 11:01:30 crc kubenswrapper[5003]: I0126 11:01:30.580413 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"980cff26-19b2-457a-a90f-b6acec8de879","Type":"ContainerStarted","Data":"ae8e8ebec6a74056fa7110c0c98ddfb1321b868bef9b07236ded09565e4d7dc4"}
Jan 26 11:01:30 crc kubenswrapper[5003]: I0126 11:01:30.580421 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"980cff26-19b2-457a-a90f-b6acec8de879","Type":"ContainerStarted","Data":"863cbbb42dda1f2f274f0f2c3452a7cb31cc6e098ed381e878d0200619ba1b04"}
Jan 26 11:01:30 crc kubenswrapper[5003]: I0126 11:01:30.634180 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-nnj57"]
Jan 26 11:01:30 crc kubenswrapper[5003]: I0126 11:01:30.638218 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-nnj57"]
Jan 26 11:01:31 crc kubenswrapper[5003]: I0126 11:01:31.073519 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8cfd216-c98d-4595-9c66-b33042952c93" path="/var/lib/kubelet/pods/e8cfd216-c98d-4595-9c66-b33042952c93/volumes"
Jan 26 11:01:31 crc kubenswrapper[5003]: I0126 11:01:31.647852 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-6vcpn" event={"ID":"a0556bc9-3e95-49cb-a5f3-38cd61fbf173","Type":"ContainerStarted","Data":"2827e0c3a3aa8528c01134e6d85f5916096ce96047f70264b6cd6bf7110736e3"}
Jan 26 11:01:31 crc kubenswrapper[5003]: I0126 11:01:31.655258 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f16c3bea-96a9-4034-8848-de4d7bbcb6b5","Type":"ContainerStarted","Data":"89ccdc73c6e6eb130b5e9248e40895a08f4cff58ffa60a751802e275bf53e801"}
Jan 26 11:01:31 crc kubenswrapper[5003]: I0126 11:01:31.655303 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f16c3bea-96a9-4034-8848-de4d7bbcb6b5","Type":"ContainerStarted","Data":"ab22dfa83cbe9bb9a6d88dddc446c510d433ba02c0ee0eb84bbc1e2f8efd3b96"}
Jan 26 11:01:31 crc kubenswrapper[5003]: I0126 11:01:31.655313 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f16c3bea-96a9-4034-8848-de4d7bbcb6b5","Type":"ContainerStarted","Data":"aa7bcf55cc48b89ce5714a5f9d10bec01f96042d06feff52fb249c766c509546"}
Jan 26 11:01:31 crc kubenswrapper[5003]: I0126 11:01:31.675704 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-ring-rebalance-6vcpn" podStartSLOduration=3.675686786 podStartE2EDuration="3.675686786s" podCreationTimestamp="2026-01-26 11:01:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 11:01:31.670497511 +0000 UTC m=+1107.211723072" watchObservedRunningTime="2026-01-26 11:01:31.675686786 +0000 UTC m=+1107.216912347"
Jan 26 11:01:31 crc kubenswrapper[5003]: I0126 11:01:31.683162 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"980cff26-19b2-457a-a90f-b6acec8de879","Type":"ContainerStarted","Data":"4be5d77c647c6c4144f10a613250b882d016f92354befa8b49599944c17a919b"}
Jan 26 11:01:31 crc kubenswrapper[5003]: I0126 11:01:31.683206 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"980cff26-19b2-457a-a90f-b6acec8de879","Type":"ContainerStarted","Data":"912adaa9bef73e5f1d97033eb93b798efae215bd8772f43f103e39cf7aad8388"}
Jan 26 11:01:31 crc kubenswrapper[5003]: I0126 11:01:31.683216 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"980cff26-19b2-457a-a90f-b6acec8de879","Type":"ContainerStarted","Data":"b9c6b3334e92374c6d259b7ddf52a594a345cfb04f8f2c05427f6062bf09785a"}
Jan 26 11:01:32 crc kubenswrapper[5003]: I0126 11:01:32.809658 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"980cff26-19b2-457a-a90f-b6acec8de879","Type":"ContainerStarted","Data":"3b84daf1e1ebe868e5535ef06c08f90895842502ba9cb87c2e317de2e8a3b0a1"}
Jan 26 11:01:32 crc kubenswrapper[5003]: I0126 11:01:32.809993 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"980cff26-19b2-457a-a90f-b6acec8de879","Type":"ContainerStarted","Data":"e587b56a1fee3e5498a7bf5cebf4a67b1d11d7fb571ba066fe4abbc70a333cd0"}
Jan 26 11:01:32 crc kubenswrapper[5003]: I0126 11:01:32.810010 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"980cff26-19b2-457a-a90f-b6acec8de879","Type":"ContainerStarted","Data":"052b65a9db7b7f7ac1b5a0605df62902cad9ecdc3de5c5a5712bdb1cc615a4cf"}
Jan 26 11:01:32 crc kubenswrapper[5003]: I0126 11:01:32.816042 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f16c3bea-96a9-4034-8848-de4d7bbcb6b5","Type":"ContainerStarted","Data":"82da56a790222dcbdea421219ef568934a45415dca26fc78c6cd9c631ecdaf76"}
Jan 26 11:01:32 crc kubenswrapper[5003]: I0126 11:01:32.816063 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f16c3bea-96a9-4034-8848-de4d7bbcb6b5","Type":"ContainerStarted","Data":"99f69c88cdf13f6ba1e6d0cdf194bc16e66195d82b495b7921f58754a4510a65"}
Jan 26 11:01:32 crc kubenswrapper[5003]: I0126 11:01:32.816073 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f16c3bea-96a9-4034-8848-de4d7bbcb6b5","Type":"ContainerStarted","Data":"6e6b84ceaef29c9be2a1d31c5585a12a7510e68a4029b56e2e32585b6a079e82"}
Jan 26 11:01:33 crc kubenswrapper[5003]: I0126 11:01:33.830622 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f16c3bea-96a9-4034-8848-de4d7bbcb6b5","Type":"ContainerStarted","Data":"a024b8fd7dc8bd7cbf6095768209b0a3c16cfe3955353d3371acae31c684d648"}
Jan 26 11:01:33 crc kubenswrapper[5003]: I0126 11:01:33.830957 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f16c3bea-96a9-4034-8848-de4d7bbcb6b5","Type":"ContainerStarted","Data":"46c426d4fe650f5e73c337596bdeb2b5dec8aeaa428f77d0f7ac065c13d8f552"}
Jan 26 11:01:33 crc kubenswrapper[5003]: I0126 11:01:33.839897 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"980cff26-19b2-457a-a90f-b6acec8de879","Type":"ContainerStarted","Data":"2467dcc8c0218a5a485c7ccfb590d97d4ca41ea8627a790c4e2320823b03ad2f"}
Jan 26 11:01:33 crc kubenswrapper[5003]: I0126 11:01:33.839931 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"980cff26-19b2-457a-a90f-b6acec8de879","Type":"ContainerStarted","Data":"202233758ef7a1a236827861b8e588e8b5f8373c3d8da3ee47ca61590a088834"}
Jan 26 11:01:33 crc kubenswrapper[5003]: I0126 11:01:33.865990 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-storage-2" podStartSLOduration=7.8659670219999995 podStartE2EDuration="7.865967022s" podCreationTimestamp="2026-01-26 11:01:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 11:01:33.862352081 +0000 UTC m=+1109.403577652" watchObservedRunningTime="2026-01-26 11:01:33.865967022 +0000 UTC m=+1109.407192583"
Jan 26 11:01:33 crc kubenswrapper[5003]: I0126 11:01:33.897947 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-storage-1" podStartSLOduration=7.897927512 podStartE2EDuration="7.897927512s" podCreationTimestamp="2026-01-26 11:01:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 11:01:33.895401292 +0000 UTC m=+1109.436626863" watchObservedRunningTime="2026-01-26 11:01:33.897927512 +0000 UTC m=+1109.439153073"
Jan 26 11:01:39 crc kubenswrapper[5003]: I0126 11:01:39.040748 5003 patch_prober.go:28] interesting pod/machine-config-daemon-m84kp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 26 11:01:39 crc kubenswrapper[5003]: I0126 11:01:39.041110 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 26 11:01:42 crc kubenswrapper[5003]: I0126 11:01:42.938509 5003 generic.go:334] "Generic (PLEG): container finished" podID="a0556bc9-3e95-49cb-a5f3-38cd61fbf173" containerID="2827e0c3a3aa8528c01134e6d85f5916096ce96047f70264b6cd6bf7110736e3" exitCode=0
Jan 26 11:01:42 crc kubenswrapper[5003]: I0126 11:01:42.938616 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-6vcpn" event={"ID":"a0556bc9-3e95-49cb-a5f3-38cd61fbf173","Type":"ContainerDied","Data":"2827e0c3a3aa8528c01134e6d85f5916096ce96047f70264b6cd6bf7110736e3"}
Jan 26 11:01:44 crc kubenswrapper[5003]: I0126 11:01:44.183476 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-6vcpn"
Jan 26 11:01:44 crc kubenswrapper[5003]: I0126 11:01:44.350324 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-ring-data-devices\") pod \"a0556bc9-3e95-49cb-a5f3-38cd61fbf173\" (UID: \"a0556bc9-3e95-49cb-a5f3-38cd61fbf173\") "
Jan 26 11:01:44 crc kubenswrapper[5003]: I0126 11:01:44.350739 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-etc-swift\") pod \"a0556bc9-3e95-49cb-a5f3-38cd61fbf173\" (UID: \"a0556bc9-3e95-49cb-a5f3-38cd61fbf173\") "
Jan 26 11:01:44 crc kubenswrapper[5003]: I0126 11:01:44.350931 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ml97v\" (UniqueName: \"kubernetes.io/projected/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-kube-api-access-ml97v\") pod \"a0556bc9-3e95-49cb-a5f3-38cd61fbf173\" (UID: \"a0556bc9-3e95-49cb-a5f3-38cd61fbf173\") "
Jan 26 11:01:44 crc kubenswrapper[5003]: I0126 11:01:44.351100 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-swiftconf\") pod \"a0556bc9-3e95-49cb-a5f3-38cd61fbf173\" (UID: \"a0556bc9-3e95-49cb-a5f3-38cd61fbf173\") "
Jan 26 11:01:44 crc kubenswrapper[5003]: I0126 11:01:44.351229 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "a0556bc9-3e95-49cb-a5f3-38cd61fbf173" (UID: "a0556bc9-3e95-49cb-a5f3-38cd61fbf173"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 11:01:44 crc kubenswrapper[5003]: I0126 11:01:44.351543 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-scripts\") pod \"a0556bc9-3e95-49cb-a5f3-38cd61fbf173\" (UID: \"a0556bc9-3e95-49cb-a5f3-38cd61fbf173\") "
Jan 26 11:01:44 crc kubenswrapper[5003]: I0126 11:01:44.351632 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "a0556bc9-3e95-49cb-a5f3-38cd61fbf173" (UID: "a0556bc9-3e95-49cb-a5f3-38cd61fbf173"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 11:01:44 crc kubenswrapper[5003]: I0126 11:01:44.351796 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-dispersionconf\") pod \"a0556bc9-3e95-49cb-a5f3-38cd61fbf173\" (UID: \"a0556bc9-3e95-49cb-a5f3-38cd61fbf173\") "
Jan 26 11:01:44 crc kubenswrapper[5003]: I0126 11:01:44.352390 5003 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-ring-data-devices\") on node \"crc\" DevicePath \"\""
Jan 26 11:01:44 crc kubenswrapper[5003]: I0126 11:01:44.352414 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-etc-swift\") on node \"crc\" DevicePath \"\""
Jan 26 11:01:44 crc kubenswrapper[5003]: I0126 11:01:44.357888 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-kube-api-access-ml97v" (OuterVolumeSpecName: "kube-api-access-ml97v") pod "a0556bc9-3e95-49cb-a5f3-38cd61fbf173" (UID: "a0556bc9-3e95-49cb-a5f3-38cd61fbf173"). InnerVolumeSpecName "kube-api-access-ml97v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 11:01:44 crc kubenswrapper[5003]: I0126 11:01:44.374783 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-scripts" (OuterVolumeSpecName: "scripts") pod "a0556bc9-3e95-49cb-a5f3-38cd61fbf173" (UID: "a0556bc9-3e95-49cb-a5f3-38cd61fbf173"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 11:01:44 crc kubenswrapper[5003]: I0126 11:01:44.377455 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "a0556bc9-3e95-49cb-a5f3-38cd61fbf173" (UID: "a0556bc9-3e95-49cb-a5f3-38cd61fbf173"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 11:01:44 crc kubenswrapper[5003]: I0126 11:01:44.380855 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "a0556bc9-3e95-49cb-a5f3-38cd61fbf173" (UID: "a0556bc9-3e95-49cb-a5f3-38cd61fbf173"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 11:01:44 crc kubenswrapper[5003]: I0126 11:01:44.453968 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-scripts\") on node \"crc\" DevicePath \"\""
Jan 26 11:01:44 crc kubenswrapper[5003]: I0126 11:01:44.454015 5003 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-dispersionconf\") on node \"crc\" DevicePath \"\""
Jan 26 11:01:44 crc kubenswrapper[5003]: I0126 11:01:44.454065 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ml97v\" (UniqueName: \"kubernetes.io/projected/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-kube-api-access-ml97v\") on node \"crc\" DevicePath \"\""
Jan 26 11:01:44 crc kubenswrapper[5003]: I0126 11:01:44.454079 5003 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a0556bc9-3e95-49cb-a5f3-38cd61fbf173-swiftconf\") on node \"crc\" DevicePath \"\""
Jan 26 11:01:44 crc kubenswrapper[5003]: I0126 11:01:44.960808 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-6vcpn" event={"ID":"a0556bc9-3e95-49cb-a5f3-38cd61fbf173","Type":"ContainerDied","Data":"4850a98216af13b384119f4d77428a57c0245a54d33a10ae3860b9b112a25a19"}
Jan 26 11:01:44 crc kubenswrapper[5003]: I0126 11:01:44.960876 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4850a98216af13b384119f4d77428a57c0245a54d33a10ae3860b9b112a25a19"
Jan 26 11:01:44 crc kubenswrapper[5003]: I0126 11:01:44.960935 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-6vcpn"
Jan 26 11:01:45 crc kubenswrapper[5003]: I0126 11:01:45.187138 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-6gqdz"]
Jan 26 11:01:45 crc kubenswrapper[5003]: E0126 11:01:45.187475 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0556bc9-3e95-49cb-a5f3-38cd61fbf173" containerName="swift-ring-rebalance"
Jan 26 11:01:45 crc kubenswrapper[5003]: I0126 11:01:45.187491 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0556bc9-3e95-49cb-a5f3-38cd61fbf173" containerName="swift-ring-rebalance"
Jan 26 11:01:45 crc kubenswrapper[5003]: I0126 11:01:45.187661 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0556bc9-3e95-49cb-a5f3-38cd61fbf173" containerName="swift-ring-rebalance"
Jan 26 11:01:45 crc kubenswrapper[5003]: I0126 11:01:45.188130 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-6gqdz"
Jan 26 11:01:45 crc kubenswrapper[5003]: I0126 11:01:45.191480 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-config-data"
Jan 26 11:01:45 crc kubenswrapper[5003]: I0126 11:01:45.191808 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-scripts"
Jan 26 11:01:45 crc kubenswrapper[5003]: I0126 11:01:45.204715 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-6gqdz"]
Jan 26 11:01:45 crc kubenswrapper[5003]: I0126 11:01:45.265612 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-ring-data-devices\") pod \"swift-ring-rebalance-debug-6gqdz\" (UID: \"aefe5625-70ed-4a10-8046-a5a0acb4e4fc\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-6gqdz"
Jan 26 11:01:45 crc kubenswrapper[5003]: I0126 11:01:45.265697 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-dispersionconf\") pod \"swift-ring-rebalance-debug-6gqdz\" (UID: \"aefe5625-70ed-4a10-8046-a5a0acb4e4fc\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-6gqdz"
Jan 26 11:01:45 crc kubenswrapper[5003]: I0126 11:01:45.265764 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-etc-swift\") pod \"swift-ring-rebalance-debug-6gqdz\" (UID: \"aefe5625-70ed-4a10-8046-a5a0acb4e4fc\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-6gqdz"
Jan 26 11:01:45 crc kubenswrapper[5003]: I0126 11:01:45.265795 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-swiftconf\") pod \"swift-ring-rebalance-debug-6gqdz\" (UID: \"aefe5625-70ed-4a10-8046-a5a0acb4e4fc\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-6gqdz"
Jan 26 11:01:45 crc kubenswrapper[5003]: I0126 11:01:45.265944 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2j8nn\" (UniqueName: \"kubernetes.io/projected/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-kube-api-access-2j8nn\") pod \"swift-ring-rebalance-debug-6gqdz\" (UID: \"aefe5625-70ed-4a10-8046-a5a0acb4e4fc\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-6gqdz"
Jan 26 11:01:45 crc kubenswrapper[5003]: I0126 11:01:45.266084 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-scripts\") pod \"swift-ring-rebalance-debug-6gqdz\" (UID: \"aefe5625-70ed-4a10-8046-a5a0acb4e4fc\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-6gqdz"
Jan 26 11:01:45 crc kubenswrapper[5003]: I0126 11:01:45.367673 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-etc-swift\") pod \"swift-ring-rebalance-debug-6gqdz\" (UID: \"aefe5625-70ed-4a10-8046-a5a0acb4e4fc\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-6gqdz"
Jan 26 11:01:45 crc kubenswrapper[5003]: I0126 11:01:45.367747 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-swiftconf\") pod \"swift-ring-rebalance-debug-6gqdz\" (UID: \"aefe5625-70ed-4a10-8046-a5a0acb4e4fc\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-6gqdz"
Jan 26 11:01:45 crc kubenswrapper[5003]: I0126 11:01:45.367810 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2j8nn\" (UniqueName: \"kubernetes.io/projected/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-kube-api-access-2j8nn\") pod \"swift-ring-rebalance-debug-6gqdz\" (UID: \"aefe5625-70ed-4a10-8046-a5a0acb4e4fc\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-6gqdz"
Jan 26 11:01:45 crc kubenswrapper[5003]: I0126 11:01:45.367957 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-scripts\") pod \"swift-ring-rebalance-debug-6gqdz\" (UID: \"aefe5625-70ed-4a10-8046-a5a0acb4e4fc\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-6gqdz"
Jan 26 11:01:45 crc kubenswrapper[5003]: I0126 11:01:45.367983 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-ring-data-devices\") pod \"swift-ring-rebalance-debug-6gqdz\" (UID: \"aefe5625-70ed-4a10-8046-a5a0acb4e4fc\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-6gqdz"
Jan 26 11:01:45 crc kubenswrapper[5003]: I0126 11:01:45.368436 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-etc-swift\") pod \"swift-ring-rebalance-debug-6gqdz\" (UID: \"aefe5625-70ed-4a10-8046-a5a0acb4e4fc\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-6gqdz"
Jan 26 11:01:45 crc kubenswrapper[5003]: I0126 11:01:45.368518 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-dispersionconf\") pod \"swift-ring-rebalance-debug-6gqdz\" (UID: \"aefe5625-70ed-4a10-8046-a5a0acb4e4fc\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-6gqdz"
Jan 26 11:01:45 crc kubenswrapper[5003]: I0126 11:01:45.368782 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-scripts\") pod \"swift-ring-rebalance-debug-6gqdz\" (UID: \"aefe5625-70ed-4a10-8046-a5a0acb4e4fc\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-6gqdz"
Jan 26 11:01:45 crc kubenswrapper[5003]: I0126 11:01:45.368856 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-ring-data-devices\") pod \"swift-ring-rebalance-debug-6gqdz\" (UID: \"aefe5625-70ed-4a10-8046-a5a0acb4e4fc\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-6gqdz"
Jan 26 11:01:45 crc kubenswrapper[5003]: I0126 11:01:45.372866 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-swiftconf\") pod \"swift-ring-rebalance-debug-6gqdz\" (UID: \"aefe5625-70ed-4a10-8046-a5a0acb4e4fc\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-6gqdz"
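The pod_startup_latency_tracker records earlier in this excerpt report podStartE2EDuration as the gap between podCreationTimestamp and watchObservedRunningTime: for swift-storage-2, 11:01:33.865967022 minus 11:01:26 is exactly the logged 7.865967022s (firstStartedPulling and lastFinishedPulling stay at the zero time because no image pull was needed). A small Go reproduction of that arithmetic, assuming the quoted timestamps follow Go's default time.Time string layout:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Layout matching timestamps like "2026-01-26 11:01:33.865967022 +0000 UTC";
	// the monotonic "m=+..." suffix in the log is not part of the parsed value.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	created, err := time.Parse(layout, "2026-01-26 11:01:26 +0000 UTC")
	if err != nil {
		panic(err)
	}
	observed, err := time.Parse(layout, "2026-01-26 11:01:33.865967022 +0000 UTC")
	if err != nil {
		panic(err)
	}
	fmt.Println(observed.Sub(created)) // 7.865967022s, matching podStartSLOduration
}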
Jan 26 11:01:45 crc kubenswrapper[5003]: I0126 11:01:45.375500 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-dispersionconf\") pod \"swift-ring-rebalance-debug-6gqdz\" (UID: \"aefe5625-70ed-4a10-8046-a5a0acb4e4fc\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-6gqdz"
Jan 26 11:01:45 crc kubenswrapper[5003]: I0126 11:01:45.387164 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2j8nn\" (UniqueName: \"kubernetes.io/projected/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-kube-api-access-2j8nn\") pod \"swift-ring-rebalance-debug-6gqdz\" (UID: \"aefe5625-70ed-4a10-8046-a5a0acb4e4fc\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-6gqdz"
Jan 26 11:01:45 crc kubenswrapper[5003]: I0126 11:01:45.502493 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-6gqdz"
Jan 26 11:01:45 crc kubenswrapper[5003]: I0126 11:01:45.970897 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-6gqdz"]
Jan 26 11:01:45 crc kubenswrapper[5003]: W0126 11:01:45.987462 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaefe5625_70ed_4a10_8046_a5a0acb4e4fc.slice/crio-9a79292127e7a841cae5702e40ad1e28097544cdbe6c50c804689195816a97b0 WatchSource:0}: Error finding container 9a79292127e7a841cae5702e40ad1e28097544cdbe6c50c804689195816a97b0: Status 404 returned error can't find the container with id 9a79292127e7a841cae5702e40ad1e28097544cdbe6c50c804689195816a97b0
Jan 26 11:01:46 crc kubenswrapper[5003]: I0126 11:01:46.984059 5003 generic.go:334] "Generic (PLEG): container finished" podID="aefe5625-70ed-4a10-8046-a5a0acb4e4fc" containerID="0cd6e749ba891948f51c520dd2c11b548d2d6e770a23c2ba6e17fd4af809cdad" exitCode=0
Jan 26 11:01:46 crc kubenswrapper[5003]: I0126 11:01:46.984169 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-6gqdz" event={"ID":"aefe5625-70ed-4a10-8046-a5a0acb4e4fc","Type":"ContainerDied","Data":"0cd6e749ba891948f51c520dd2c11b548d2d6e770a23c2ba6e17fd4af809cdad"}
Jan 26 11:01:46 crc kubenswrapper[5003]: I0126 11:01:46.985454 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-6gqdz" event={"ID":"aefe5625-70ed-4a10-8046-a5a0acb4e4fc","Type":"ContainerStarted","Data":"9a79292127e7a841cae5702e40ad1e28097544cdbe6c50c804689195816a97b0"}
Jan 26 11:01:47 crc kubenswrapper[5003]: I0126 11:01:47.026705 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-6gqdz"]
Jan 26 11:01:47 crc kubenswrapper[5003]: I0126 11:01:47.038860 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-6gqdz"]
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.281265 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-6gqdz"
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.417722 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-ring-data-devices\") pod \"aefe5625-70ed-4a10-8046-a5a0acb4e4fc\" (UID: \"aefe5625-70ed-4a10-8046-a5a0acb4e4fc\") "
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.417838 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-swiftconf\") pod \"aefe5625-70ed-4a10-8046-a5a0acb4e4fc\" (UID: \"aefe5625-70ed-4a10-8046-a5a0acb4e4fc\") "
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.417905 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-etc-swift\") pod \"aefe5625-70ed-4a10-8046-a5a0acb4e4fc\" (UID: \"aefe5625-70ed-4a10-8046-a5a0acb4e4fc\") "
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.417958 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2j8nn\" (UniqueName: \"kubernetes.io/projected/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-kube-api-access-2j8nn\") pod \"aefe5625-70ed-4a10-8046-a5a0acb4e4fc\" (UID: \"aefe5625-70ed-4a10-8046-a5a0acb4e4fc\") "
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.418045 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-scripts\") pod \"aefe5625-70ed-4a10-8046-a5a0acb4e4fc\" (UID: \"aefe5625-70ed-4a10-8046-a5a0acb4e4fc\") "
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.418084 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-dispersionconf\") pod \"aefe5625-70ed-4a10-8046-a5a0acb4e4fc\" (UID: \"aefe5625-70ed-4a10-8046-a5a0acb4e4fc\") "
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.420899 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "aefe5625-70ed-4a10-8046-a5a0acb4e4fc" (UID: "aefe5625-70ed-4a10-8046-a5a0acb4e4fc"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.421786 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "aefe5625-70ed-4a10-8046-a5a0acb4e4fc" (UID: "aefe5625-70ed-4a10-8046-a5a0acb4e4fc"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.431093 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-kube-api-access-2j8nn" (OuterVolumeSpecName: "kube-api-access-2j8nn") pod "aefe5625-70ed-4a10-8046-a5a0acb4e4fc" (UID: "aefe5625-70ed-4a10-8046-a5a0acb4e4fc"). InnerVolumeSpecName "kube-api-access-2j8nn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.444683 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-crw22"]
Jan 26 11:01:48 crc kubenswrapper[5003]: E0126 11:01:48.445372 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aefe5625-70ed-4a10-8046-a5a0acb4e4fc" containerName="swift-ring-rebalance"
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.445631 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="aefe5625-70ed-4a10-8046-a5a0acb4e4fc" containerName="swift-ring-rebalance"
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.445802 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="aefe5625-70ed-4a10-8046-a5a0acb4e4fc" containerName="swift-ring-rebalance"
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.446408 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-crw22"
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.448396 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-scripts" (OuterVolumeSpecName: "scripts") pod "aefe5625-70ed-4a10-8046-a5a0acb4e4fc" (UID: "aefe5625-70ed-4a10-8046-a5a0acb4e4fc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.452466 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-crw22"]
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.482369 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "aefe5625-70ed-4a10-8046-a5a0acb4e4fc" (UID: "aefe5625-70ed-4a10-8046-a5a0acb4e4fc"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.489732 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "aefe5625-70ed-4a10-8046-a5a0acb4e4fc" (UID: "aefe5625-70ed-4a10-8046-a5a0acb4e4fc"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.519584 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2smc4\" (UniqueName: \"kubernetes.io/projected/ad757f65-110b-4d88-a83d-d1997664923d-kube-api-access-2smc4\") pod \"swift-ring-rebalance-debug-crw22\" (UID: \"ad757f65-110b-4d88-a83d-d1997664923d\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-crw22"
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.519646 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ad757f65-110b-4d88-a83d-d1997664923d-scripts\") pod \"swift-ring-rebalance-debug-crw22\" (UID: \"ad757f65-110b-4d88-a83d-d1997664923d\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-crw22"
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.519709 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/ad757f65-110b-4d88-a83d-d1997664923d-dispersionconf\") pod \"swift-ring-rebalance-debug-crw22\" (UID: \"ad757f65-110b-4d88-a83d-d1997664923d\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-crw22"
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.519752 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/ad757f65-110b-4d88-a83d-d1997664923d-ring-data-devices\") pod \"swift-ring-rebalance-debug-crw22\" (UID: \"ad757f65-110b-4d88-a83d-d1997664923d\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-crw22"
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.519817 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/ad757f65-110b-4d88-a83d-d1997664923d-swiftconf\") pod \"swift-ring-rebalance-debug-crw22\" (UID: \"ad757f65-110b-4d88-a83d-d1997664923d\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-crw22"
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.519951 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/ad757f65-110b-4d88-a83d-d1997664923d-etc-swift\") pod \"swift-ring-rebalance-debug-crw22\" (UID: \"ad757f65-110b-4d88-a83d-d1997664923d\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-crw22"
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.520215 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2j8nn\" (UniqueName: \"kubernetes.io/projected/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-kube-api-access-2j8nn\") on node \"crc\" DevicePath \"\""
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.520243 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-scripts\") on node \"crc\" DevicePath \"\""
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.520258 5003 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-dispersionconf\") on node \"crc\" DevicePath \"\""
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.520273 5003 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-ring-data-devices\") on node \"crc\" DevicePath \"\""
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.520303 5003 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-swiftconf\") on node \"crc\" DevicePath \"\""
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.520315 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/aefe5625-70ed-4a10-8046-a5a0acb4e4fc-etc-swift\") on node \"crc\" DevicePath \"\""
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.621555 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ad757f65-110b-4d88-a83d-d1997664923d-scripts\") pod \"swift-ring-rebalance-debug-crw22\" (UID: \"ad757f65-110b-4d88-a83d-d1997664923d\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-crw22"
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.621629 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/ad757f65-110b-4d88-a83d-d1997664923d-dispersionconf\") pod \"swift-ring-rebalance-debug-crw22\" (UID: \"ad757f65-110b-4d88-a83d-d1997664923d\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-crw22"
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.621675 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/ad757f65-110b-4d88-a83d-d1997664923d-ring-data-devices\") pod \"swift-ring-rebalance-debug-crw22\" (UID: \"ad757f65-110b-4d88-a83d-d1997664923d\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-crw22"
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.621728 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/ad757f65-110b-4d88-a83d-d1997664923d-swiftconf\") pod \"swift-ring-rebalance-debug-crw22\" (UID: \"ad757f65-110b-4d88-a83d-d1997664923d\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-crw22"
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.621750 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/ad757f65-110b-4d88-a83d-d1997664923d-etc-swift\") pod \"swift-ring-rebalance-debug-crw22\" (UID: \"ad757f65-110b-4d88-a83d-d1997664923d\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-crw22"
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.621810 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2smc4\" (UniqueName: \"kubernetes.io/projected/ad757f65-110b-4d88-a83d-d1997664923d-kube-api-access-2smc4\") pod \"swift-ring-rebalance-debug-crw22\" (UID: \"ad757f65-110b-4d88-a83d-d1997664923d\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-crw22"
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.622540 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/ad757f65-110b-4d88-a83d-d1997664923d-etc-swift\") pod \"swift-ring-rebalance-debug-crw22\" (UID: \"ad757f65-110b-4d88-a83d-d1997664923d\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-crw22"
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.622559 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ad757f65-110b-4d88-a83d-d1997664923d-scripts\") pod \"swift-ring-rebalance-debug-crw22\" (UID: \"ad757f65-110b-4d88-a83d-d1997664923d\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-crw22"
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.623032 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/ad757f65-110b-4d88-a83d-d1997664923d-ring-data-devices\") pod \"swift-ring-rebalance-debug-crw22\" (UID: \"ad757f65-110b-4d88-a83d-d1997664923d\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-crw22"
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.625390 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/ad757f65-110b-4d88-a83d-d1997664923d-dispersionconf\") pod \"swift-ring-rebalance-debug-crw22\" (UID: \"ad757f65-110b-4d88-a83d-d1997664923d\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-crw22"
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.626463 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/ad757f65-110b-4d88-a83d-d1997664923d-swiftconf\") pod \"swift-ring-rebalance-debug-crw22\" (UID: \"ad757f65-110b-4d88-a83d-d1997664923d\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-crw22"
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.637614 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2smc4\" (UniqueName: \"kubernetes.io/projected/ad757f65-110b-4d88-a83d-d1997664923d-kube-api-access-2smc4\") pod \"swift-ring-rebalance-debug-crw22\" (UID: \"ad757f65-110b-4d88-a83d-d1997664923d\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-crw22"
Jan 26 11:01:48 crc kubenswrapper[5003]: I0126 11:01:48.867582 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-crw22"
Jan 26 11:01:49 crc kubenswrapper[5003]: I0126 11:01:49.017537 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aefe5625-70ed-4a10-8046-a5a0acb4e4fc" path="/var/lib/kubelet/pods/aefe5625-70ed-4a10-8046-a5a0acb4e4fc/volumes"
Jan 26 11:01:49 crc kubenswrapper[5003]: I0126 11:01:49.028380 5003 scope.go:117] "RemoveContainer" containerID="0cd6e749ba891948f51c520dd2c11b548d2d6e770a23c2ba6e17fd4af809cdad"
Jan 26 11:01:49 crc kubenswrapper[5003]: I0126 11:01:49.028525 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-6gqdz"
Jan 26 11:01:49 crc kubenswrapper[5003]: I0126 11:01:49.115963 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-crw22"]
Jan 26 11:01:49 crc kubenswrapper[5003]: W0126 11:01:49.126366 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podad757f65_110b_4d88_a83d_d1997664923d.slice/crio-cae7bbb8c042aa00e886912d9aec48a662f82619440052bb73449bed791f1621 WatchSource:0}: Error finding container cae7bbb8c042aa00e886912d9aec48a662f82619440052bb73449bed791f1621: Status 404 returned error can't find the container with id cae7bbb8c042aa00e886912d9aec48a662f82619440052bb73449bed791f1621
Jan 26 11:01:49 crc kubenswrapper[5003]: E0126 11:01:49.205166 5003 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaefe5625_70ed_4a10_8046_a5a0acb4e4fc.slice\": RecentStats: unable to find data in memory cache]"
Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.041996 5003 generic.go:334] "Generic (PLEG): container finished" podID="ad757f65-110b-4d88-a83d-d1997664923d" containerID="506ba4e738f2633da1c42bd98ac774567847d4bb450a9b9c6d96803a0344fb07" exitCode=0
Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.042104 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-crw22" event={"ID":"ad757f65-110b-4d88-a83d-d1997664923d","Type":"ContainerDied","Data":"506ba4e738f2633da1c42bd98ac774567847d4bb450a9b9c6d96803a0344fb07"}
Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.042639 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-crw22" event={"ID":"ad757f65-110b-4d88-a83d-d1997664923d","Type":"ContainerStarted","Data":"cae7bbb8c042aa00e886912d9aec48a662f82619440052bb73449bed791f1621"}
Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.089589 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-crw22"]
Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.098439 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-crw22"]
Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.302655 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-2"]
Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.303395 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="account-server" containerID="cri-o://6cf23814347ea3b8ff01a79b40bc34c3613bfc514f28d734fc77c3fb9f66bd7c" gracePeriod=30
Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.304036 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="swift-recon-cron" containerID="cri-o://a024b8fd7dc8bd7cbf6095768209b0a3c16cfe3955353d3371acae31c684d648" gracePeriod=30
Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.304107 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="rsync" containerID="cri-o://46c426d4fe650f5e73c337596bdeb2b5dec8aeaa428f77d0f7ac065c13d8f552" gracePeriod=30
Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.304220 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="object-expirer" containerID="cri-o://82da56a790222dcbdea421219ef568934a45415dca26fc78c6cd9c631ecdaf76" gracePeriod=30
Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.304312 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="object-updater" containerID="cri-o://99f69c88cdf13f6ba1e6d0cdf194bc16e66195d82b495b7921f58754a4510a65" gracePeriod=30
Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.304372 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="object-auditor" containerID="cri-o://6e6b84ceaef29c9be2a1d31c5585a12a7510e68a4029b56e2e32585b6a079e82" gracePeriod=30
Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.304429 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="object-replicator" containerID="cri-o://89ccdc73c6e6eb130b5e9248e40895a08f4cff58ffa60a751802e275bf53e801" gracePeriod=30
Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.304484 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="object-server" containerID="cri-o://ab22dfa83cbe9bb9a6d88dddc446c510d433ba02c0ee0eb84bbc1e2f8efd3b96" gracePeriod=30
Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.304535 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="container-updater" containerID="cri-o://aa7bcf55cc48b89ce5714a5f9d10bec01f96042d06feff52fb249c766c509546" gracePeriod=30
Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.304593 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="container-auditor" containerID="cri-o://cf124e879cc2c0f0ae011f0c34385d51444c59a3492a0303e60d8161cbc9321e" gracePeriod=30
Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.304642 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="container-replicator" containerID="cri-o://4b5e0284abaaa6fc5f1745a23ed3352066d05ab79c3c7c044132fffd49069085" gracePeriod=30
Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.304682 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="container-server" containerID="cri-o://223f2d4605898f119dbf0429c94380cc06f6771657025bb95012a8194c5c6c8c" gracePeriod=30
Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.304743 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="account-reaper" containerID="cri-o://b0a8a7d412bfe444befc54bd057301ba0b92fe7537adec15f35659f2c2a6262e" gracePeriod=30
Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.304790 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="account-auditor" containerID="cri-o://828905524a607e4481f9f56be6aea9ed539eef768da3cb769bba5d4bd9185a41" gracePeriod=30
Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.304833 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="account-replicator" containerID="cri-o://c5b0c6029e6937dc34433353c875e403332f61fea1a6089932ce0b2f9e2231cb" gracePeriod=30
Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.353387 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-1"]
Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.354064 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="account-server" containerID="cri-o://2fc7a4d553254bcfd70a6603c24123f5313042a2a942cd5c7ec6a56de544854a" gracePeriod=30
Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.354237 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="swift-recon-cron" containerID="cri-o://2467dcc8c0218a5a485c7ccfb590d97d4ca41ea8627a790c4e2320823b03ad2f" gracePeriod=30
Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.354291 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="rsync" containerID="cri-o://202233758ef7a1a236827861b8e588e8b5f8373c3d8da3ee47ca61590a088834" gracePeriod=30
Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.354331 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="object-expirer" containerID="cri-o://3b84daf1e1ebe868e5535ef06c08f90895842502ba9cb87c2e317de2e8a3b0a1" gracePeriod=30
Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.354381 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="object-updater" containerID="cri-o://e587b56a1fee3e5498a7bf5cebf4a67b1d11d7fb571ba066fe4abbc70a333cd0" gracePeriod=30
Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.354521 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="account-reaper" containerID="cri-o://863cbbb42dda1f2f274f0f2c3452a7cb31cc6e098ed381e878d0200619ba1b04" gracePeriod=30
Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.354729 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="account-replicator" containerID="cri-o://40f68e661a9dd524e00f97a9724bddfe1020d8ccf737c7184461d1bc029d39cd" gracePeriod=30
Jan 26 11:01:50 crc kubenswrapper[5003]: I0126
11:01:50.354877 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="object-server" containerID="cri-o://912adaa9bef73e5f1d97033eb93b798efae215bd8772f43f103e39cf7aad8388" gracePeriod=30 Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.355474 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="object-auditor" containerID="cri-o://052b65a9db7b7f7ac1b5a0605df62902cad9ecdc3de5c5a5712bdb1cc615a4cf" gracePeriod=30 Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.355873 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="object-replicator" containerID="cri-o://4be5d77c647c6c4144f10a613250b882d016f92354befa8b49599944c17a919b" gracePeriod=30 Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.357899 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="container-updater" containerID="cri-o://b9c6b3334e92374c6d259b7ddf52a594a345cfb04f8f2c05427f6062bf09785a" gracePeriod=30 Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.354420 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="container-server" containerID="cri-o://ae8e8ebec6a74056fa7110c0c98ddfb1321b868bef9b07236ded09565e4d7dc4" gracePeriod=30 Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.354516 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="container-replicator" containerID="cri-o://5b84314763903eda18ff9726b3817ee45dc85ecaf53a0756d21a03cb7724fd72" gracePeriod=30 Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.360923 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="container-auditor" containerID="cri-o://5cdaea9b636f11d0fb48e590deae50e2c4e15c0c8f5d4c656019ede0b2b44a63" gracePeriod=30 Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.361106 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="account-auditor" containerID="cri-o://d0ee0f1ae0834b8c92fa8008b131bef63fb72cec700023820e98184dec9e6aad" gracePeriod=30 Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.385471 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.386191 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="account-server" containerID="cri-o://dd1fb65813942bf400b95e20d6ad2d0c12a6094c8721f5e09ce6ed2eca47bbd1" gracePeriod=30 Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.386416 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" 
podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="swift-recon-cron" containerID="cri-o://5fe0c6d4431b856fe4436d168ee7e1e5d4399d39c3a6c76037edb9e8ab19d93d" gracePeriod=30 Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.386477 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="rsync" containerID="cri-o://1f052a09e3da171b6ed8c264f5462cebfaf9cc8cd6e4d643c4676c1c3c0f0bf3" gracePeriod=30 Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.386521 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="object-expirer" containerID="cri-o://aa8f9aadbc5e77757c5abbb131c7c069a35d27f56fe76f934fca1651e6618705" gracePeriod=30 Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.386560 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="object-updater" containerID="cri-o://26114010e05fab42d39367ece8f172ab8c229ebdd45980ebbe883834403c5680" gracePeriod=30 Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.386601 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="object-auditor" containerID="cri-o://59cbbeca4e455551fd4d58e02cd043424dc43dbb708f812dc323404c1a202446" gracePeriod=30 Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.386643 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="object-replicator" containerID="cri-o://7b2534758d832bcd1a530711efe81788030cd894bec690d03b6191096a789a05" gracePeriod=30 Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.386694 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="object-server" containerID="cri-o://14e62a22aa945c458ae016769350cba7c01a838b85299573d468b2fa1cef07cd" gracePeriod=30 Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.386753 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="container-updater" containerID="cri-o://242c4b5df0783c40de8c8d50308ef656e8bc43b2c49800e6e0f32a0c9d3b5548" gracePeriod=30 Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.386810 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="container-auditor" containerID="cri-o://88d289c8c29cef995cba467c3ef015fe579c965aa3a27e317ec0952f33134666" gracePeriod=30 Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.386856 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="container-replicator" containerID="cri-o://1265d9b3a3feebe021921e531bdb66eae7dd896fa15b34ab8198f0a228a053af" gracePeriod=30 Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.386912 5003 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="swift-kuttl-tests/swift-storage-0" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="container-server" containerID="cri-o://f8ead6df5ff9818293ee63a8f4549cf26db872cb3414f924dadf1e20d52850b6" gracePeriod=30 Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.386969 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="account-reaper" containerID="cri-o://4490066eb9bd6c66b9b0208990bd8d6ddb6b95928de76266ff83dabcfbf84d8e" gracePeriod=30 Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.387015 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="account-auditor" containerID="cri-o://a64c4272241480892dfe3f2278ada32e293df4c15d3e4af914b9b432ee3fcac1" gracePeriod=30 Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.387066 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="account-replicator" containerID="cri-o://eae01b5370d1e228605d370f34df9fa397a7d68cf59e29307321708ca0b44d64" gracePeriod=30 Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.481048 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-6vcpn"] Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.562417 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-6vcpn"] Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.579948 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf"] Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.580374 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" podUID="2a9437ea-d818-4371-907f-ef50e49fab62" containerName="proxy-httpd" containerID="cri-o://388624f524767eac6b63acfd806c460011a8d4796ef7c899b8d2ca4cc7b5529d" gracePeriod=30 Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.581104 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" podUID="2a9437ea-d818-4371-907f-ef50e49fab62" containerName="proxy-server" containerID="cri-o://301129a4469083c1cbfc055f4d68e951c799bf81380e7634dfcfb7e1b44f082d" gracePeriod=30 Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.634651 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" podUID="2a9437ea-d818-4371-907f-ef50e49fab62" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.101:8080/healthcheck\": EOF" Jan 26 11:01:50 crc kubenswrapper[5003]: I0126 11:01:50.742754 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" podUID="2a9437ea-d818-4371-907f-ef50e49fab62" containerName="proxy-server" probeResult="failure" output="HTTP probe failed with statuscode: 502" Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.035355 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0556bc9-3e95-49cb-a5f3-38cd61fbf173" path="/var/lib/kubelet/pods/a0556bc9-3e95-49cb-a5f3-38cd61fbf173/volumes" Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.090084 5003 generic.go:334] "Generic (PLEG): container 
finished" podID="b1cd12b2-b1e1-4085-b627-52c229799294" containerID="aa8f9aadbc5e77757c5abbb131c7c069a35d27f56fe76f934fca1651e6618705" exitCode=0 Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.090123 5003 generic.go:334] "Generic (PLEG): container finished" podID="b1cd12b2-b1e1-4085-b627-52c229799294" containerID="88d289c8c29cef995cba467c3ef015fe579c965aa3a27e317ec0952f33134666" exitCode=0 Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.090181 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"b1cd12b2-b1e1-4085-b627-52c229799294","Type":"ContainerDied","Data":"aa8f9aadbc5e77757c5abbb131c7c069a35d27f56fe76f934fca1651e6618705"} Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.090222 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"b1cd12b2-b1e1-4085-b627-52c229799294","Type":"ContainerDied","Data":"88d289c8c29cef995cba467c3ef015fe579c965aa3a27e317ec0952f33134666"} Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.126952 5003 generic.go:334] "Generic (PLEG): container finished" podID="980cff26-19b2-457a-a90f-b6acec8de879" containerID="4be5d77c647c6c4144f10a613250b882d016f92354befa8b49599944c17a919b" exitCode=0 Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.127009 5003 generic.go:334] "Generic (PLEG): container finished" podID="980cff26-19b2-457a-a90f-b6acec8de879" containerID="5cdaea9b636f11d0fb48e590deae50e2c4e15c0c8f5d4c656019ede0b2b44a63" exitCode=0 Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.127018 5003 generic.go:334] "Generic (PLEG): container finished" podID="980cff26-19b2-457a-a90f-b6acec8de879" containerID="5b84314763903eda18ff9726b3817ee45dc85ecaf53a0756d21a03cb7724fd72" exitCode=0 Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.127023 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"980cff26-19b2-457a-a90f-b6acec8de879","Type":"ContainerDied","Data":"4be5d77c647c6c4144f10a613250b882d016f92354befa8b49599944c17a919b"} Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.127109 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"980cff26-19b2-457a-a90f-b6acec8de879","Type":"ContainerDied","Data":"5cdaea9b636f11d0fb48e590deae50e2c4e15c0c8f5d4c656019ede0b2b44a63"} Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.127122 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"980cff26-19b2-457a-a90f-b6acec8de879","Type":"ContainerDied","Data":"5b84314763903eda18ff9726b3817ee45dc85ecaf53a0756d21a03cb7724fd72"} Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.137877 5003 generic.go:334] "Generic (PLEG): container finished" podID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerID="82da56a790222dcbdea421219ef568934a45415dca26fc78c6cd9c631ecdaf76" exitCode=0 Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.137929 5003 generic.go:334] "Generic (PLEG): container finished" podID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerID="99f69c88cdf13f6ba1e6d0cdf194bc16e66195d82b495b7921f58754a4510a65" exitCode=0 Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.137938 5003 generic.go:334] "Generic (PLEG): container finished" podID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerID="6e6b84ceaef29c9be2a1d31c5585a12a7510e68a4029b56e2e32585b6a079e82" exitCode=0 Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.137946 5003 
generic.go:334] "Generic (PLEG): container finished" podID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerID="aa7bcf55cc48b89ce5714a5f9d10bec01f96042d06feff52fb249c766c509546" exitCode=0 Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.137953 5003 generic.go:334] "Generic (PLEG): container finished" podID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerID="4b5e0284abaaa6fc5f1745a23ed3352066d05ab79c3c7c044132fffd49069085" exitCode=0 Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.137961 5003 generic.go:334] "Generic (PLEG): container finished" podID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerID="b0a8a7d412bfe444befc54bd057301ba0b92fe7537adec15f35659f2c2a6262e" exitCode=0 Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.137967 5003 generic.go:334] "Generic (PLEG): container finished" podID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerID="c5b0c6029e6937dc34433353c875e403332f61fea1a6089932ce0b2f9e2231cb" exitCode=0 Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.138366 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f16c3bea-96a9-4034-8848-de4d7bbcb6b5","Type":"ContainerDied","Data":"82da56a790222dcbdea421219ef568934a45415dca26fc78c6cd9c631ecdaf76"} Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.138417 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f16c3bea-96a9-4034-8848-de4d7bbcb6b5","Type":"ContainerDied","Data":"99f69c88cdf13f6ba1e6d0cdf194bc16e66195d82b495b7921f58754a4510a65"} Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.138432 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f16c3bea-96a9-4034-8848-de4d7bbcb6b5","Type":"ContainerDied","Data":"6e6b84ceaef29c9be2a1d31c5585a12a7510e68a4029b56e2e32585b6a079e82"} Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.138443 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f16c3bea-96a9-4034-8848-de4d7bbcb6b5","Type":"ContainerDied","Data":"aa7bcf55cc48b89ce5714a5f9d10bec01f96042d06feff52fb249c766c509546"} Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.138456 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f16c3bea-96a9-4034-8848-de4d7bbcb6b5","Type":"ContainerDied","Data":"4b5e0284abaaa6fc5f1745a23ed3352066d05ab79c3c7c044132fffd49069085"} Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.138468 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f16c3bea-96a9-4034-8848-de4d7bbcb6b5","Type":"ContainerDied","Data":"b0a8a7d412bfe444befc54bd057301ba0b92fe7537adec15f35659f2c2a6262e"} Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.138482 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f16c3bea-96a9-4034-8848-de4d7bbcb6b5","Type":"ContainerDied","Data":"c5b0c6029e6937dc34433353c875e403332f61fea1a6089932ce0b2f9e2231cb"} Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.366140 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-crw22" Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.414664 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/ad757f65-110b-4d88-a83d-d1997664923d-swiftconf\") pod \"ad757f65-110b-4d88-a83d-d1997664923d\" (UID: \"ad757f65-110b-4d88-a83d-d1997664923d\") " Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.414843 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/ad757f65-110b-4d88-a83d-d1997664923d-ring-data-devices\") pod \"ad757f65-110b-4d88-a83d-d1997664923d\" (UID: \"ad757f65-110b-4d88-a83d-d1997664923d\") " Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.414883 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/ad757f65-110b-4d88-a83d-d1997664923d-etc-swift\") pod \"ad757f65-110b-4d88-a83d-d1997664923d\" (UID: \"ad757f65-110b-4d88-a83d-d1997664923d\") " Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.414947 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ad757f65-110b-4d88-a83d-d1997664923d-scripts\") pod \"ad757f65-110b-4d88-a83d-d1997664923d\" (UID: \"ad757f65-110b-4d88-a83d-d1997664923d\") " Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.414972 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/ad757f65-110b-4d88-a83d-d1997664923d-dispersionconf\") pod \"ad757f65-110b-4d88-a83d-d1997664923d\" (UID: \"ad757f65-110b-4d88-a83d-d1997664923d\") " Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.414995 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2smc4\" (UniqueName: \"kubernetes.io/projected/ad757f65-110b-4d88-a83d-d1997664923d-kube-api-access-2smc4\") pod \"ad757f65-110b-4d88-a83d-d1997664923d\" (UID: \"ad757f65-110b-4d88-a83d-d1997664923d\") " Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.419552 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad757f65-110b-4d88-a83d-d1997664923d-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "ad757f65-110b-4d88-a83d-d1997664923d" (UID: "ad757f65-110b-4d88-a83d-d1997664923d"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.420392 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad757f65-110b-4d88-a83d-d1997664923d-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "ad757f65-110b-4d88-a83d-d1997664923d" (UID: "ad757f65-110b-4d88-a83d-d1997664923d"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.424797 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad757f65-110b-4d88-a83d-d1997664923d-kube-api-access-2smc4" (OuterVolumeSpecName: "kube-api-access-2smc4") pod "ad757f65-110b-4d88-a83d-d1997664923d" (UID: "ad757f65-110b-4d88-a83d-d1997664923d"). InnerVolumeSpecName "kube-api-access-2smc4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.446646 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad757f65-110b-4d88-a83d-d1997664923d-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "ad757f65-110b-4d88-a83d-d1997664923d" (UID: "ad757f65-110b-4d88-a83d-d1997664923d"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.446905 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad757f65-110b-4d88-a83d-d1997664923d-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "ad757f65-110b-4d88-a83d-d1997664923d" (UID: "ad757f65-110b-4d88-a83d-d1997664923d"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.448660 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad757f65-110b-4d88-a83d-d1997664923d-scripts" (OuterVolumeSpecName: "scripts") pod "ad757f65-110b-4d88-a83d-d1997664923d" (UID: "ad757f65-110b-4d88-a83d-d1997664923d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.517603 5003 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/ad757f65-110b-4d88-a83d-d1997664923d-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.517663 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/ad757f65-110b-4d88-a83d-d1997664923d-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.517694 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ad757f65-110b-4d88-a83d-d1997664923d-scripts\") on node \"crc\" DevicePath \"\"" Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.517707 5003 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/ad757f65-110b-4d88-a83d-d1997664923d-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.517722 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2smc4\" (UniqueName: \"kubernetes.io/projected/ad757f65-110b-4d88-a83d-d1997664923d-kube-api-access-2smc4\") on node \"crc\" DevicePath \"\"" Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.517739 5003 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/ad757f65-110b-4d88-a83d-d1997664923d-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.691145 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.821694 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a9437ea-d818-4371-907f-ef50e49fab62-run-httpd\") pod \"2a9437ea-d818-4371-907f-ef50e49fab62\" (UID: \"2a9437ea-d818-4371-907f-ef50e49fab62\") " Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.821749 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a9437ea-d818-4371-907f-ef50e49fab62-log-httpd\") pod \"2a9437ea-d818-4371-907f-ef50e49fab62\" (UID: \"2a9437ea-d818-4371-907f-ef50e49fab62\") " Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.821800 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a9437ea-d818-4371-907f-ef50e49fab62-config-data\") pod \"2a9437ea-d818-4371-907f-ef50e49fab62\" (UID: \"2a9437ea-d818-4371-907f-ef50e49fab62\") " Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.821854 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rv4zk\" (UniqueName: \"kubernetes.io/projected/2a9437ea-d818-4371-907f-ef50e49fab62-kube-api-access-rv4zk\") pod \"2a9437ea-d818-4371-907f-ef50e49fab62\" (UID: \"2a9437ea-d818-4371-907f-ef50e49fab62\") " Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.821958 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2a9437ea-d818-4371-907f-ef50e49fab62-etc-swift\") pod \"2a9437ea-d818-4371-907f-ef50e49fab62\" (UID: \"2a9437ea-d818-4371-907f-ef50e49fab62\") " Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.822111 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a9437ea-d818-4371-907f-ef50e49fab62-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "2a9437ea-d818-4371-907f-ef50e49fab62" (UID: "2a9437ea-d818-4371-907f-ef50e49fab62"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.822271 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a9437ea-d818-4371-907f-ef50e49fab62-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "2a9437ea-d818-4371-907f-ef50e49fab62" (UID: "2a9437ea-d818-4371-907f-ef50e49fab62"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.822515 5003 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a9437ea-d818-4371-907f-ef50e49fab62-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.822534 5003 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a9437ea-d818-4371-907f-ef50e49fab62-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.824840 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a9437ea-d818-4371-907f-ef50e49fab62-kube-api-access-rv4zk" (OuterVolumeSpecName: "kube-api-access-rv4zk") pod "2a9437ea-d818-4371-907f-ef50e49fab62" (UID: "2a9437ea-d818-4371-907f-ef50e49fab62"). 
InnerVolumeSpecName "kube-api-access-rv4zk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.825586 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a9437ea-d818-4371-907f-ef50e49fab62-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "2a9437ea-d818-4371-907f-ef50e49fab62" (UID: "2a9437ea-d818-4371-907f-ef50e49fab62"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.856862 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a9437ea-d818-4371-907f-ef50e49fab62-config-data" (OuterVolumeSpecName: "config-data") pod "2a9437ea-d818-4371-907f-ef50e49fab62" (UID: "2a9437ea-d818-4371-907f-ef50e49fab62"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.923756 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rv4zk\" (UniqueName: \"kubernetes.io/projected/2a9437ea-d818-4371-907f-ef50e49fab62-kube-api-access-rv4zk\") on node \"crc\" DevicePath \"\"" Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.923793 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2a9437ea-d818-4371-907f-ef50e49fab62-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 26 11:01:51 crc kubenswrapper[5003]: I0126 11:01:51.923803 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a9437ea-d818-4371-907f-ef50e49fab62-config-data\") on node \"crc\" DevicePath \"\"" Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.154747 5003 generic.go:334] "Generic (PLEG): container finished" podID="b1cd12b2-b1e1-4085-b627-52c229799294" containerID="1f052a09e3da171b6ed8c264f5462cebfaf9cc8cd6e4d643c4676c1c3c0f0bf3" exitCode=0 Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.154803 5003 generic.go:334] "Generic (PLEG): container finished" podID="b1cd12b2-b1e1-4085-b627-52c229799294" containerID="26114010e05fab42d39367ece8f172ab8c229ebdd45980ebbe883834403c5680" exitCode=0 Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.154816 5003 generic.go:334] "Generic (PLEG): container finished" podID="b1cd12b2-b1e1-4085-b627-52c229799294" containerID="59cbbeca4e455551fd4d58e02cd043424dc43dbb708f812dc323404c1a202446" exitCode=0 Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.154827 5003 generic.go:334] "Generic (PLEG): container finished" podID="b1cd12b2-b1e1-4085-b627-52c229799294" containerID="7b2534758d832bcd1a530711efe81788030cd894bec690d03b6191096a789a05" exitCode=0 Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.154836 5003 generic.go:334] "Generic (PLEG): container finished" podID="b1cd12b2-b1e1-4085-b627-52c229799294" containerID="14e62a22aa945c458ae016769350cba7c01a838b85299573d468b2fa1cef07cd" exitCode=0 Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.154876 5003 generic.go:334] "Generic (PLEG): container finished" podID="b1cd12b2-b1e1-4085-b627-52c229799294" containerID="242c4b5df0783c40de8c8d50308ef656e8bc43b2c49800e6e0f32a0c9d3b5548" exitCode=0 Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.154889 5003 generic.go:334] "Generic (PLEG): container finished" podID="b1cd12b2-b1e1-4085-b627-52c229799294" containerID="1265d9b3a3feebe021921e531bdb66eae7dd896fa15b34ab8198f0a228a053af" exitCode=0 Jan 26 
11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.154899 5003 generic.go:334] "Generic (PLEG): container finished" podID="b1cd12b2-b1e1-4085-b627-52c229799294" containerID="f8ead6df5ff9818293ee63a8f4549cf26db872cb3414f924dadf1e20d52850b6" exitCode=0 Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.154911 5003 generic.go:334] "Generic (PLEG): container finished" podID="b1cd12b2-b1e1-4085-b627-52c229799294" containerID="4490066eb9bd6c66b9b0208990bd8d6ddb6b95928de76266ff83dabcfbf84d8e" exitCode=0 Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.154921 5003 generic.go:334] "Generic (PLEG): container finished" podID="b1cd12b2-b1e1-4085-b627-52c229799294" containerID="a64c4272241480892dfe3f2278ada32e293df4c15d3e4af914b9b432ee3fcac1" exitCode=0 Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.154835 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"b1cd12b2-b1e1-4085-b627-52c229799294","Type":"ContainerDied","Data":"1f052a09e3da171b6ed8c264f5462cebfaf9cc8cd6e4d643c4676c1c3c0f0bf3"} Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.154995 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"b1cd12b2-b1e1-4085-b627-52c229799294","Type":"ContainerDied","Data":"26114010e05fab42d39367ece8f172ab8c229ebdd45980ebbe883834403c5680"} Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.155017 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"b1cd12b2-b1e1-4085-b627-52c229799294","Type":"ContainerDied","Data":"59cbbeca4e455551fd4d58e02cd043424dc43dbb708f812dc323404c1a202446"} Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.155031 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"b1cd12b2-b1e1-4085-b627-52c229799294","Type":"ContainerDied","Data":"7b2534758d832bcd1a530711efe81788030cd894bec690d03b6191096a789a05"} Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.155042 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"b1cd12b2-b1e1-4085-b627-52c229799294","Type":"ContainerDied","Data":"14e62a22aa945c458ae016769350cba7c01a838b85299573d468b2fa1cef07cd"} Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.155052 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"b1cd12b2-b1e1-4085-b627-52c229799294","Type":"ContainerDied","Data":"242c4b5df0783c40de8c8d50308ef656e8bc43b2c49800e6e0f32a0c9d3b5548"} Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.155063 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"b1cd12b2-b1e1-4085-b627-52c229799294","Type":"ContainerDied","Data":"1265d9b3a3feebe021921e531bdb66eae7dd896fa15b34ab8198f0a228a053af"} Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.155073 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"b1cd12b2-b1e1-4085-b627-52c229799294","Type":"ContainerDied","Data":"f8ead6df5ff9818293ee63a8f4549cf26db872cb3414f924dadf1e20d52850b6"} Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.155084 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"b1cd12b2-b1e1-4085-b627-52c229799294","Type":"ContainerDied","Data":"4490066eb9bd6c66b9b0208990bd8d6ddb6b95928de76266ff83dabcfbf84d8e"} Jan 26 11:01:52 crc 
kubenswrapper[5003]: I0126 11:01:52.155095 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"b1cd12b2-b1e1-4085-b627-52c229799294","Type":"ContainerDied","Data":"a64c4272241480892dfe3f2278ada32e293df4c15d3e4af914b9b432ee3fcac1"} Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.154951 5003 generic.go:334] "Generic (PLEG): container finished" podID="b1cd12b2-b1e1-4085-b627-52c229799294" containerID="eae01b5370d1e228605d370f34df9fa397a7d68cf59e29307321708ca0b44d64" exitCode=0 Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.155106 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"b1cd12b2-b1e1-4085-b627-52c229799294","Type":"ContainerDied","Data":"eae01b5370d1e228605d370f34df9fa397a7d68cf59e29307321708ca0b44d64"} Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.155115 5003 generic.go:334] "Generic (PLEG): container finished" podID="b1cd12b2-b1e1-4085-b627-52c229799294" containerID="dd1fb65813942bf400b95e20d6ad2d0c12a6094c8721f5e09ce6ed2eca47bbd1" exitCode=0 Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.155119 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"b1cd12b2-b1e1-4085-b627-52c229799294","Type":"ContainerDied","Data":"dd1fb65813942bf400b95e20d6ad2d0c12a6094c8721f5e09ce6ed2eca47bbd1"} Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.163020 5003 generic.go:334] "Generic (PLEG): container finished" podID="980cff26-19b2-457a-a90f-b6acec8de879" containerID="202233758ef7a1a236827861b8e588e8b5f8373c3d8da3ee47ca61590a088834" exitCode=0 Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.163061 5003 generic.go:334] "Generic (PLEG): container finished" podID="980cff26-19b2-457a-a90f-b6acec8de879" containerID="3b84daf1e1ebe868e5535ef06c08f90895842502ba9cb87c2e317de2e8a3b0a1" exitCode=0 Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.163076 5003 generic.go:334] "Generic (PLEG): container finished" podID="980cff26-19b2-457a-a90f-b6acec8de879" containerID="e587b56a1fee3e5498a7bf5cebf4a67b1d11d7fb571ba066fe4abbc70a333cd0" exitCode=0 Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.163087 5003 generic.go:334] "Generic (PLEG): container finished" podID="980cff26-19b2-457a-a90f-b6acec8de879" containerID="052b65a9db7b7f7ac1b5a0605df62902cad9ecdc3de5c5a5712bdb1cc615a4cf" exitCode=0 Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.163098 5003 generic.go:334] "Generic (PLEG): container finished" podID="980cff26-19b2-457a-a90f-b6acec8de879" containerID="912adaa9bef73e5f1d97033eb93b798efae215bd8772f43f103e39cf7aad8388" exitCode=0 Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.163108 5003 generic.go:334] "Generic (PLEG): container finished" podID="980cff26-19b2-457a-a90f-b6acec8de879" containerID="b9c6b3334e92374c6d259b7ddf52a594a345cfb04f8f2c05427f6062bf09785a" exitCode=0 Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.163097 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"980cff26-19b2-457a-a90f-b6acec8de879","Type":"ContainerDied","Data":"202233758ef7a1a236827861b8e588e8b5f8373c3d8da3ee47ca61590a088834"} Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.163162 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" 
event={"ID":"980cff26-19b2-457a-a90f-b6acec8de879","Type":"ContainerDied","Data":"3b84daf1e1ebe868e5535ef06c08f90895842502ba9cb87c2e317de2e8a3b0a1"} Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.163182 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"980cff26-19b2-457a-a90f-b6acec8de879","Type":"ContainerDied","Data":"e587b56a1fee3e5498a7bf5cebf4a67b1d11d7fb571ba066fe4abbc70a333cd0"} Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.163194 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"980cff26-19b2-457a-a90f-b6acec8de879","Type":"ContainerDied","Data":"052b65a9db7b7f7ac1b5a0605df62902cad9ecdc3de5c5a5712bdb1cc615a4cf"} Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.163206 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"980cff26-19b2-457a-a90f-b6acec8de879","Type":"ContainerDied","Data":"912adaa9bef73e5f1d97033eb93b798efae215bd8772f43f103e39cf7aad8388"} Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.163216 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"980cff26-19b2-457a-a90f-b6acec8de879","Type":"ContainerDied","Data":"b9c6b3334e92374c6d259b7ddf52a594a345cfb04f8f2c05427f6062bf09785a"} Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.163118 5003 generic.go:334] "Generic (PLEG): container finished" podID="980cff26-19b2-457a-a90f-b6acec8de879" containerID="ae8e8ebec6a74056fa7110c0c98ddfb1321b868bef9b07236ded09565e4d7dc4" exitCode=0 Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.163238 5003 generic.go:334] "Generic (PLEG): container finished" podID="980cff26-19b2-457a-a90f-b6acec8de879" containerID="863cbbb42dda1f2f274f0f2c3452a7cb31cc6e098ed381e878d0200619ba1b04" exitCode=0 Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.163251 5003 generic.go:334] "Generic (PLEG): container finished" podID="980cff26-19b2-457a-a90f-b6acec8de879" containerID="d0ee0f1ae0834b8c92fa8008b131bef63fb72cec700023820e98184dec9e6aad" exitCode=0 Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.163261 5003 generic.go:334] "Generic (PLEG): container finished" podID="980cff26-19b2-457a-a90f-b6acec8de879" containerID="40f68e661a9dd524e00f97a9724bddfe1020d8ccf737c7184461d1bc029d39cd" exitCode=0 Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.163270 5003 generic.go:334] "Generic (PLEG): container finished" podID="980cff26-19b2-457a-a90f-b6acec8de879" containerID="2fc7a4d553254bcfd70a6603c24123f5313042a2a942cd5c7ec6a56de544854a" exitCode=0 Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.163228 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"980cff26-19b2-457a-a90f-b6acec8de879","Type":"ContainerDied","Data":"ae8e8ebec6a74056fa7110c0c98ddfb1321b868bef9b07236ded09565e4d7dc4"} Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.163344 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"980cff26-19b2-457a-a90f-b6acec8de879","Type":"ContainerDied","Data":"863cbbb42dda1f2f274f0f2c3452a7cb31cc6e098ed381e878d0200619ba1b04"} Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.163392 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" 
event={"ID":"980cff26-19b2-457a-a90f-b6acec8de879","Type":"ContainerDied","Data":"d0ee0f1ae0834b8c92fa8008b131bef63fb72cec700023820e98184dec9e6aad"} Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.163408 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"980cff26-19b2-457a-a90f-b6acec8de879","Type":"ContainerDied","Data":"40f68e661a9dd524e00f97a9724bddfe1020d8ccf737c7184461d1bc029d39cd"} Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.163423 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"980cff26-19b2-457a-a90f-b6acec8de879","Type":"ContainerDied","Data":"2fc7a4d553254bcfd70a6603c24123f5313042a2a942cd5c7ec6a56de544854a"} Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.165264 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-crw22" Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.165268 5003 scope.go:117] "RemoveContainer" containerID="506ba4e738f2633da1c42bd98ac774567847d4bb450a9b9c6d96803a0344fb07" Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.167679 5003 generic.go:334] "Generic (PLEG): container finished" podID="2a9437ea-d818-4371-907f-ef50e49fab62" containerID="301129a4469083c1cbfc055f4d68e951c799bf81380e7634dfcfb7e1b44f082d" exitCode=0 Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.167716 5003 generic.go:334] "Generic (PLEG): container finished" podID="2a9437ea-d818-4371-907f-ef50e49fab62" containerID="388624f524767eac6b63acfd806c460011a8d4796ef7c899b8d2ca4cc7b5529d" exitCode=0 Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.167762 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" event={"ID":"2a9437ea-d818-4371-907f-ef50e49fab62","Type":"ContainerDied","Data":"301129a4469083c1cbfc055f4d68e951c799bf81380e7634dfcfb7e1b44f082d"} Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.167792 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" event={"ID":"2a9437ea-d818-4371-907f-ef50e49fab62","Type":"ContainerDied","Data":"388624f524767eac6b63acfd806c460011a8d4796ef7c899b8d2ca4cc7b5529d"} Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.167805 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" event={"ID":"2a9437ea-d818-4371-907f-ef50e49fab62","Type":"ContainerDied","Data":"5723774673dd0ed1b26c73df12ee771aca77fc4c06ba4b970fc03cd073aa6a97"} Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.167863 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf" Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.193904 5003 generic.go:334] "Generic (PLEG): container finished" podID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerID="46c426d4fe650f5e73c337596bdeb2b5dec8aeaa428f77d0f7ac065c13d8f552" exitCode=0 Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.193973 5003 generic.go:334] "Generic (PLEG): container finished" podID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerID="89ccdc73c6e6eb130b5e9248e40895a08f4cff58ffa60a751802e275bf53e801" exitCode=0 Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.193999 5003 generic.go:334] "Generic (PLEG): container finished" podID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerID="ab22dfa83cbe9bb9a6d88dddc446c510d433ba02c0ee0eb84bbc1e2f8efd3b96" exitCode=0 Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.194013 5003 generic.go:334] "Generic (PLEG): container finished" podID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerID="cf124e879cc2c0f0ae011f0c34385d51444c59a3492a0303e60d8161cbc9321e" exitCode=0 Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.194025 5003 generic.go:334] "Generic (PLEG): container finished" podID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerID="223f2d4605898f119dbf0429c94380cc06f6771657025bb95012a8194c5c6c8c" exitCode=0 Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.193961 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f16c3bea-96a9-4034-8848-de4d7bbcb6b5","Type":"ContainerDied","Data":"46c426d4fe650f5e73c337596bdeb2b5dec8aeaa428f77d0f7ac065c13d8f552"} Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.194136 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f16c3bea-96a9-4034-8848-de4d7bbcb6b5","Type":"ContainerDied","Data":"89ccdc73c6e6eb130b5e9248e40895a08f4cff58ffa60a751802e275bf53e801"} Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.194155 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f16c3bea-96a9-4034-8848-de4d7bbcb6b5","Type":"ContainerDied","Data":"ab22dfa83cbe9bb9a6d88dddc446c510d433ba02c0ee0eb84bbc1e2f8efd3b96"} Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.194167 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f16c3bea-96a9-4034-8848-de4d7bbcb6b5","Type":"ContainerDied","Data":"cf124e879cc2c0f0ae011f0c34385d51444c59a3492a0303e60d8161cbc9321e"} Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.194178 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f16c3bea-96a9-4034-8848-de4d7bbcb6b5","Type":"ContainerDied","Data":"223f2d4605898f119dbf0429c94380cc06f6771657025bb95012a8194c5c6c8c"} Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.194190 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f16c3bea-96a9-4034-8848-de4d7bbcb6b5","Type":"ContainerDied","Data":"828905524a607e4481f9f56be6aea9ed539eef768da3cb769bba5d4bd9185a41"} Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.194066 5003 generic.go:334] "Generic (PLEG): container finished" podID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerID="828905524a607e4481f9f56be6aea9ed539eef768da3cb769bba5d4bd9185a41" exitCode=0 Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.194216 5003 generic.go:334] "Generic 
(PLEG): container finished" podID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerID="6cf23814347ea3b8ff01a79b40bc34c3613bfc514f28d734fc77c3fb9f66bd7c" exitCode=0 Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.194235 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f16c3bea-96a9-4034-8848-de4d7bbcb6b5","Type":"ContainerDied","Data":"6cf23814347ea3b8ff01a79b40bc34c3613bfc514f28d734fc77c3fb9f66bd7c"} Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.553147 5003 scope.go:117] "RemoveContainer" containerID="301129a4469083c1cbfc055f4d68e951c799bf81380e7634dfcfb7e1b44f082d" Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.579611 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf"] Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.587331 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-proxy-67f6cc5479-gvvmf"] Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.603165 5003 scope.go:117] "RemoveContainer" containerID="388624f524767eac6b63acfd806c460011a8d4796ef7c899b8d2ca4cc7b5529d" Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.622393 5003 scope.go:117] "RemoveContainer" containerID="301129a4469083c1cbfc055f4d68e951c799bf81380e7634dfcfb7e1b44f082d" Jan 26 11:01:52 crc kubenswrapper[5003]: E0126 11:01:52.623228 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"301129a4469083c1cbfc055f4d68e951c799bf81380e7634dfcfb7e1b44f082d\": container with ID starting with 301129a4469083c1cbfc055f4d68e951c799bf81380e7634dfcfb7e1b44f082d not found: ID does not exist" containerID="301129a4469083c1cbfc055f4d68e951c799bf81380e7634dfcfb7e1b44f082d" Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.623330 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"301129a4469083c1cbfc055f4d68e951c799bf81380e7634dfcfb7e1b44f082d"} err="failed to get container status \"301129a4469083c1cbfc055f4d68e951c799bf81380e7634dfcfb7e1b44f082d\": rpc error: code = NotFound desc = could not find container \"301129a4469083c1cbfc055f4d68e951c799bf81380e7634dfcfb7e1b44f082d\": container with ID starting with 301129a4469083c1cbfc055f4d68e951c799bf81380e7634dfcfb7e1b44f082d not found: ID does not exist" Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.623372 5003 scope.go:117] "RemoveContainer" containerID="388624f524767eac6b63acfd806c460011a8d4796ef7c899b8d2ca4cc7b5529d" Jan 26 11:01:52 crc kubenswrapper[5003]: E0126 11:01:52.624045 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"388624f524767eac6b63acfd806c460011a8d4796ef7c899b8d2ca4cc7b5529d\": container with ID starting with 388624f524767eac6b63acfd806c460011a8d4796ef7c899b8d2ca4cc7b5529d not found: ID does not exist" containerID="388624f524767eac6b63acfd806c460011a8d4796ef7c899b8d2ca4cc7b5529d" Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.624157 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"388624f524767eac6b63acfd806c460011a8d4796ef7c899b8d2ca4cc7b5529d"} err="failed to get container status \"388624f524767eac6b63acfd806c460011a8d4796ef7c899b8d2ca4cc7b5529d\": rpc error: code = NotFound desc = could not find container \"388624f524767eac6b63acfd806c460011a8d4796ef7c899b8d2ca4cc7b5529d\": container with ID starting with 
388624f524767eac6b63acfd806c460011a8d4796ef7c899b8d2ca4cc7b5529d not found: ID does not exist" Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.624260 5003 scope.go:117] "RemoveContainer" containerID="301129a4469083c1cbfc055f4d68e951c799bf81380e7634dfcfb7e1b44f082d" Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.624924 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"301129a4469083c1cbfc055f4d68e951c799bf81380e7634dfcfb7e1b44f082d"} err="failed to get container status \"301129a4469083c1cbfc055f4d68e951c799bf81380e7634dfcfb7e1b44f082d\": rpc error: code = NotFound desc = could not find container \"301129a4469083c1cbfc055f4d68e951c799bf81380e7634dfcfb7e1b44f082d\": container with ID starting with 301129a4469083c1cbfc055f4d68e951c799bf81380e7634dfcfb7e1b44f082d not found: ID does not exist" Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.624982 5003 scope.go:117] "RemoveContainer" containerID="388624f524767eac6b63acfd806c460011a8d4796ef7c899b8d2ca4cc7b5529d" Jan 26 11:01:52 crc kubenswrapper[5003]: I0126 11:01:52.625543 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"388624f524767eac6b63acfd806c460011a8d4796ef7c899b8d2ca4cc7b5529d"} err="failed to get container status \"388624f524767eac6b63acfd806c460011a8d4796ef7c899b8d2ca4cc7b5529d\": rpc error: code = NotFound desc = could not find container \"388624f524767eac6b63acfd806c460011a8d4796ef7c899b8d2ca4cc7b5529d\": container with ID starting with 388624f524767eac6b63acfd806c460011a8d4796ef7c899b8d2ca4cc7b5529d not found: ID does not exist" Jan 26 11:01:53 crc kubenswrapper[5003]: I0126 11:01:53.012768 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a9437ea-d818-4371-907f-ef50e49fab62" path="/var/lib/kubelet/pods/2a9437ea-d818-4371-907f-ef50e49fab62/volumes" Jan 26 11:01:53 crc kubenswrapper[5003]: I0126 11:01:53.013573 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad757f65-110b-4d88-a83d-d1997664923d" path="/var/lib/kubelet/pods/ad757f65-110b-4d88-a83d-d1997664923d/volumes" Jan 26 11:02:09 crc kubenswrapper[5003]: I0126 11:02:09.039861 5003 patch_prober.go:28] interesting pod/machine-config-daemon-m84kp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 11:02:09 crc kubenswrapper[5003]: I0126 11:02:09.040520 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 11:02:20 crc kubenswrapper[5003]: I0126 11:02:20.477418 5003 generic.go:334] "Generic (PLEG): container finished" podID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerID="a024b8fd7dc8bd7cbf6095768209b0a3c16cfe3955353d3371acae31c684d648" exitCode=137 Jan 26 11:02:20 crc kubenswrapper[5003]: I0126 11:02:20.477477 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f16c3bea-96a9-4034-8848-de4d7bbcb6b5","Type":"ContainerDied","Data":"a024b8fd7dc8bd7cbf6095768209b0a3c16cfe3955353d3371acae31c684d648"} Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.233491 5003 util.go:48] "No ready sandbox for 
pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.242916 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.306298 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/980cff26-19b2-457a-a90f-b6acec8de879-etc-swift\") pod \"980cff26-19b2-457a-a90f-b6acec8de879\" (UID: \"980cff26-19b2-457a-a90f-b6acec8de879\") " Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.306349 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/f16c3bea-96a9-4034-8848-de4d7bbcb6b5-lock\") pod \"f16c3bea-96a9-4034-8848-de4d7bbcb6b5\" (UID: \"f16c3bea-96a9-4034-8848-de4d7bbcb6b5\") " Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.306386 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vkcg6\" (UniqueName: \"kubernetes.io/projected/980cff26-19b2-457a-a90f-b6acec8de879-kube-api-access-vkcg6\") pod \"980cff26-19b2-457a-a90f-b6acec8de879\" (UID: \"980cff26-19b2-457a-a90f-b6acec8de879\") " Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.306410 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/f16c3bea-96a9-4034-8848-de4d7bbcb6b5-cache\") pod \"f16c3bea-96a9-4034-8848-de4d7bbcb6b5\" (UID: \"f16c3bea-96a9-4034-8848-de4d7bbcb6b5\") " Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.306435 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f16c3bea-96a9-4034-8848-de4d7bbcb6b5-etc-swift\") pod \"f16c3bea-96a9-4034-8848-de4d7bbcb6b5\" (UID: \"f16c3bea-96a9-4034-8848-de4d7bbcb6b5\") " Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.306484 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"980cff26-19b2-457a-a90f-b6acec8de879\" (UID: \"980cff26-19b2-457a-a90f-b6acec8de879\") " Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.306516 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"f16c3bea-96a9-4034-8848-de4d7bbcb6b5\" (UID: \"f16c3bea-96a9-4034-8848-de4d7bbcb6b5\") " Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.306541 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/980cff26-19b2-457a-a90f-b6acec8de879-lock\") pod \"980cff26-19b2-457a-a90f-b6acec8de879\" (UID: \"980cff26-19b2-457a-a90f-b6acec8de879\") " Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.306580 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-znxs4\" (UniqueName: \"kubernetes.io/projected/f16c3bea-96a9-4034-8848-de4d7bbcb6b5-kube-api-access-znxs4\") pod \"f16c3bea-96a9-4034-8848-de4d7bbcb6b5\" (UID: \"f16c3bea-96a9-4034-8848-de4d7bbcb6b5\") " Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.306605 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: 
\"kubernetes.io/empty-dir/980cff26-19b2-457a-a90f-b6acec8de879-cache\") pod \"980cff26-19b2-457a-a90f-b6acec8de879\" (UID: \"980cff26-19b2-457a-a90f-b6acec8de879\") " Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.306927 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f16c3bea-96a9-4034-8848-de4d7bbcb6b5-lock" (OuterVolumeSpecName: "lock") pod "f16c3bea-96a9-4034-8848-de4d7bbcb6b5" (UID: "f16c3bea-96a9-4034-8848-de4d7bbcb6b5"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.306940 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f16c3bea-96a9-4034-8848-de4d7bbcb6b5-cache" (OuterVolumeSpecName: "cache") pod "f16c3bea-96a9-4034-8848-de4d7bbcb6b5" (UID: "f16c3bea-96a9-4034-8848-de4d7bbcb6b5"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.307361 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/980cff26-19b2-457a-a90f-b6acec8de879-cache" (OuterVolumeSpecName: "cache") pod "980cff26-19b2-457a-a90f-b6acec8de879" (UID: "980cff26-19b2-457a-a90f-b6acec8de879"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.307747 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/980cff26-19b2-457a-a90f-b6acec8de879-lock" (OuterVolumeSpecName: "lock") pod "980cff26-19b2-457a-a90f-b6acec8de879" (UID: "980cff26-19b2-457a-a90f-b6acec8de879"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.313578 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "swift") pod "f16c3bea-96a9-4034-8848-de4d7bbcb6b5" (UID: "f16c3bea-96a9-4034-8848-de4d7bbcb6b5"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.313886 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f16c3bea-96a9-4034-8848-de4d7bbcb6b5-kube-api-access-znxs4" (OuterVolumeSpecName: "kube-api-access-znxs4") pod "f16c3bea-96a9-4034-8848-de4d7bbcb6b5" (UID: "f16c3bea-96a9-4034-8848-de4d7bbcb6b5"). InnerVolumeSpecName "kube-api-access-znxs4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.313920 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/980cff26-19b2-457a-a90f-b6acec8de879-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "980cff26-19b2-457a-a90f-b6acec8de879" (UID: "980cff26-19b2-457a-a90f-b6acec8de879"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.314622 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f16c3bea-96a9-4034-8848-de4d7bbcb6b5-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "f16c3bea-96a9-4034-8848-de4d7bbcb6b5" (UID: "f16c3bea-96a9-4034-8848-de4d7bbcb6b5"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.314516 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "swift") pod "980cff26-19b2-457a-a90f-b6acec8de879" (UID: "980cff26-19b2-457a-a90f-b6acec8de879"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.315776 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/980cff26-19b2-457a-a90f-b6acec8de879-kube-api-access-vkcg6" (OuterVolumeSpecName: "kube-api-access-vkcg6") pod "980cff26-19b2-457a-a90f-b6acec8de879" (UID: "980cff26-19b2-457a-a90f-b6acec8de879"). InnerVolumeSpecName "kube-api-access-vkcg6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.317259 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.407699 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"b1cd12b2-b1e1-4085-b627-52c229799294\" (UID: \"b1cd12b2-b1e1-4085-b627-52c229799294\") " Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.408101 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/b1cd12b2-b1e1-4085-b627-52c229799294-cache\") pod \"b1cd12b2-b1e1-4085-b627-52c229799294\" (UID: \"b1cd12b2-b1e1-4085-b627-52c229799294\") " Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.408128 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b1cd12b2-b1e1-4085-b627-52c229799294-etc-swift\") pod \"b1cd12b2-b1e1-4085-b627-52c229799294\" (UID: \"b1cd12b2-b1e1-4085-b627-52c229799294\") " Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.408145 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/b1cd12b2-b1e1-4085-b627-52c229799294-lock\") pod \"b1cd12b2-b1e1-4085-b627-52c229799294\" (UID: \"b1cd12b2-b1e1-4085-b627-52c229799294\") " Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.408183 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bpdtb\" (UniqueName: \"kubernetes.io/projected/b1cd12b2-b1e1-4085-b627-52c229799294-kube-api-access-bpdtb\") pod \"b1cd12b2-b1e1-4085-b627-52c229799294\" (UID: \"b1cd12b2-b1e1-4085-b627-52c229799294\") " Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.408647 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/980cff26-19b2-457a-a90f-b6acec8de879-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.408703 5003 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/f16c3bea-96a9-4034-8848-de4d7bbcb6b5-lock\") on node \"crc\" DevicePath \"\"" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.408714 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vkcg6\" (UniqueName: 
\"kubernetes.io/projected/980cff26-19b2-457a-a90f-b6acec8de879-kube-api-access-vkcg6\") on node \"crc\" DevicePath \"\"" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.408726 5003 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/f16c3bea-96a9-4034-8848-de4d7bbcb6b5-cache\") on node \"crc\" DevicePath \"\"" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.408773 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f16c3bea-96a9-4034-8848-de4d7bbcb6b5-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.408795 5003 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.408810 5003 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.408820 5003 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/980cff26-19b2-457a-a90f-b6acec8de879-lock\") on node \"crc\" DevicePath \"\"" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.408831 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-znxs4\" (UniqueName: \"kubernetes.io/projected/f16c3bea-96a9-4034-8848-de4d7bbcb6b5-kube-api-access-znxs4\") on node \"crc\" DevicePath \"\"" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.408842 5003 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/980cff26-19b2-457a-a90f-b6acec8de879-cache\") on node \"crc\" DevicePath \"\"" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.408882 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1cd12b2-b1e1-4085-b627-52c229799294-cache" (OuterVolumeSpecName: "cache") pod "b1cd12b2-b1e1-4085-b627-52c229799294" (UID: "b1cd12b2-b1e1-4085-b627-52c229799294"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.410550 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1cd12b2-b1e1-4085-b627-52c229799294-lock" (OuterVolumeSpecName: "lock") pod "b1cd12b2-b1e1-4085-b627-52c229799294" (UID: "b1cd12b2-b1e1-4085-b627-52c229799294"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.410609 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "swift") pod "b1cd12b2-b1e1-4085-b627-52c229799294" (UID: "b1cd12b2-b1e1-4085-b627-52c229799294"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.412376 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1cd12b2-b1e1-4085-b627-52c229799294-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "b1cd12b2-b1e1-4085-b627-52c229799294" (UID: "b1cd12b2-b1e1-4085-b627-52c229799294"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.414849 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1cd12b2-b1e1-4085-b627-52c229799294-kube-api-access-bpdtb" (OuterVolumeSpecName: "kube-api-access-bpdtb") pod "b1cd12b2-b1e1-4085-b627-52c229799294" (UID: "b1cd12b2-b1e1-4085-b627-52c229799294"). InnerVolumeSpecName "kube-api-access-bpdtb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.424709 5003 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.425841 5003 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.499483 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f16c3bea-96a9-4034-8848-de4d7bbcb6b5","Type":"ContainerDied","Data":"5a4eb25959254562739801e0f898d0c4f367dd7f9bd2b17c11f2ce2d39703966"} Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.499541 5003 scope.go:117] "RemoveContainer" containerID="a024b8fd7dc8bd7cbf6095768209b0a3c16cfe3955353d3371acae31c684d648" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.499732 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.510078 5003 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.510103 5003 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.510113 5003 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/b1cd12b2-b1e1-4085-b627-52c229799294-cache\") on node \"crc\" DevicePath \"\"" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.510121 5003 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/b1cd12b2-b1e1-4085-b627-52c229799294-lock\") on node \"crc\" DevicePath \"\"" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.510130 5003 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.510139 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b1cd12b2-b1e1-4085-b627-52c229799294-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.510148 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bpdtb\" (UniqueName: \"kubernetes.io/projected/b1cd12b2-b1e1-4085-b627-52c229799294-kube-api-access-bpdtb\") on node \"crc\" DevicePath \"\"" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.513702 5003 generic.go:334] "Generic (PLEG): container finished" 
podID="b1cd12b2-b1e1-4085-b627-52c229799294" containerID="5fe0c6d4431b856fe4436d168ee7e1e5d4399d39c3a6c76037edb9e8ab19d93d" exitCode=137 Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.513795 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"b1cd12b2-b1e1-4085-b627-52c229799294","Type":"ContainerDied","Data":"5fe0c6d4431b856fe4436d168ee7e1e5d4399d39c3a6c76037edb9e8ab19d93d"} Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.513832 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"b1cd12b2-b1e1-4085-b627-52c229799294","Type":"ContainerDied","Data":"6c2939cb07b2a5206f72cbe482dccac648773163f192f36d2ffe8be26162d9f3"} Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.513860 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.530144 5003 generic.go:334] "Generic (PLEG): container finished" podID="980cff26-19b2-457a-a90f-b6acec8de879" containerID="2467dcc8c0218a5a485c7ccfb590d97d4ca41ea8627a790c4e2320823b03ad2f" exitCode=137 Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.530191 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"980cff26-19b2-457a-a90f-b6acec8de879","Type":"ContainerDied","Data":"2467dcc8c0218a5a485c7ccfb590d97d4ca41ea8627a790c4e2320823b03ad2f"} Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.530221 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"912adaa9bef73e5f1d97033eb93b798efae215bd8772f43f103e39cf7aad8388"} Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.530235 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b9c6b3334e92374c6d259b7ddf52a594a345cfb04f8f2c05427f6062bf09785a"} Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.530243 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5cdaea9b636f11d0fb48e590deae50e2c4e15c0c8f5d4c656019ede0b2b44a63"} Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.530251 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5b84314763903eda18ff9726b3817ee45dc85ecaf53a0756d21a03cb7724fd72"} Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.530257 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ae8e8ebec6a74056fa7110c0c98ddfb1321b868bef9b07236ded09565e4d7dc4"} Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.530264 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"863cbbb42dda1f2f274f0f2c3452a7cb31cc6e098ed381e878d0200619ba1b04"} Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.530272 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d0ee0f1ae0834b8c92fa8008b131bef63fb72cec700023820e98184dec9e6aad"} Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.530301 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"40f68e661a9dd524e00f97a9724bddfe1020d8ccf737c7184461d1bc029d39cd"} Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 
11:02:21.530309 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2fc7a4d553254bcfd70a6603c24123f5313042a2a942cd5c7ec6a56de544854a"} Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.530325 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"980cff26-19b2-457a-a90f-b6acec8de879","Type":"ContainerDied","Data":"272993c17960c33ffdf6d1529a81a1a7b0f9bcc8cdf73b78636648c1e33eaaad"} Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.530338 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2467dcc8c0218a5a485c7ccfb590d97d4ca41ea8627a790c4e2320823b03ad2f"} Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.530347 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"202233758ef7a1a236827861b8e588e8b5f8373c3d8da3ee47ca61590a088834"} Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.530354 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3b84daf1e1ebe868e5535ef06c08f90895842502ba9cb87c2e317de2e8a3b0a1"} Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.530360 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e587b56a1fee3e5498a7bf5cebf4a67b1d11d7fb571ba066fe4abbc70a333cd0"} Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.530367 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"052b65a9db7b7f7ac1b5a0605df62902cad9ecdc3de5c5a5712bdb1cc615a4cf"} Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.530373 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4be5d77c647c6c4144f10a613250b882d016f92354befa8b49599944c17a919b"} Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.530380 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"912adaa9bef73e5f1d97033eb93b798efae215bd8772f43f103e39cf7aad8388"} Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.530386 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b9c6b3334e92374c6d259b7ddf52a594a345cfb04f8f2c05427f6062bf09785a"} Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.530393 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5cdaea9b636f11d0fb48e590deae50e2c4e15c0c8f5d4c656019ede0b2b44a63"} Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.530399 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5b84314763903eda18ff9726b3817ee45dc85ecaf53a0756d21a03cb7724fd72"} Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.530405 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ae8e8ebec6a74056fa7110c0c98ddfb1321b868bef9b07236ded09565e4d7dc4"} Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.530412 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"863cbbb42dda1f2f274f0f2c3452a7cb31cc6e098ed381e878d0200619ba1b04"} Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 
11:02:21.530418 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d0ee0f1ae0834b8c92fa8008b131bef63fb72cec700023820e98184dec9e6aad"} Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.530424 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"40f68e661a9dd524e00f97a9724bddfe1020d8ccf737c7184461d1bc029d39cd"} Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.530431 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2fc7a4d553254bcfd70a6603c24123f5313042a2a942cd5c7ec6a56de544854a"} Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.530586 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.541965 5003 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.544256 5003 scope.go:117] "RemoveContainer" containerID="46c426d4fe650f5e73c337596bdeb2b5dec8aeaa428f77d0f7ac065c13d8f552" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.552593 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-2"] Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.559928 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-storage-2"] Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.568412 5003 scope.go:117] "RemoveContainer" containerID="82da56a790222dcbdea421219ef568934a45415dca26fc78c6cd9c631ecdaf76" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.570683 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.585954 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.596784 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-1"] Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.597699 5003 scope.go:117] "RemoveContainer" containerID="99f69c88cdf13f6ba1e6d0cdf194bc16e66195d82b495b7921f58754a4510a65" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.606172 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-storage-1"] Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.611733 5003 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.620243 5003 scope.go:117] "RemoveContainer" containerID="6e6b84ceaef29c9be2a1d31c5585a12a7510e68a4029b56e2e32585b6a079e82" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.639985 5003 scope.go:117] "RemoveContainer" containerID="89ccdc73c6e6eb130b5e9248e40895a08f4cff58ffa60a751802e275bf53e801" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.661601 5003 scope.go:117] "RemoveContainer" containerID="ab22dfa83cbe9bb9a6d88dddc446c510d433ba02c0ee0eb84bbc1e2f8efd3b96" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.682706 5003 scope.go:117] "RemoveContainer" 
containerID="aa7bcf55cc48b89ce5714a5f9d10bec01f96042d06feff52fb249c766c509546" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.702192 5003 scope.go:117] "RemoveContainer" containerID="cf124e879cc2c0f0ae011f0c34385d51444c59a3492a0303e60d8161cbc9321e" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.730595 5003 scope.go:117] "RemoveContainer" containerID="4b5e0284abaaa6fc5f1745a23ed3352066d05ab79c3c7c044132fffd49069085" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.752661 5003 scope.go:117] "RemoveContainer" containerID="223f2d4605898f119dbf0429c94380cc06f6771657025bb95012a8194c5c6c8c" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.771115 5003 scope.go:117] "RemoveContainer" containerID="b0a8a7d412bfe444befc54bd057301ba0b92fe7537adec15f35659f2c2a6262e" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.791790 5003 scope.go:117] "RemoveContainer" containerID="828905524a607e4481f9f56be6aea9ed539eef768da3cb769bba5d4bd9185a41" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.811236 5003 scope.go:117] "RemoveContainer" containerID="c5b0c6029e6937dc34433353c875e403332f61fea1a6089932ce0b2f9e2231cb" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.826235 5003 scope.go:117] "RemoveContainer" containerID="6cf23814347ea3b8ff01a79b40bc34c3613bfc514f28d734fc77c3fb9f66bd7c" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.843701 5003 scope.go:117] "RemoveContainer" containerID="5fe0c6d4431b856fe4436d168ee7e1e5d4399d39c3a6c76037edb9e8ab19d93d" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.863409 5003 scope.go:117] "RemoveContainer" containerID="1f052a09e3da171b6ed8c264f5462cebfaf9cc8cd6e4d643c4676c1c3c0f0bf3" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.880304 5003 scope.go:117] "RemoveContainer" containerID="aa8f9aadbc5e77757c5abbb131c7c069a35d27f56fe76f934fca1651e6618705" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.898776 5003 scope.go:117] "RemoveContainer" containerID="26114010e05fab42d39367ece8f172ab8c229ebdd45980ebbe883834403c5680" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.926255 5003 scope.go:117] "RemoveContainer" containerID="59cbbeca4e455551fd4d58e02cd043424dc43dbb708f812dc323404c1a202446" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.958987 5003 scope.go:117] "RemoveContainer" containerID="7b2534758d832bcd1a530711efe81788030cd894bec690d03b6191096a789a05" Jan 26 11:02:21 crc kubenswrapper[5003]: I0126 11:02:21.987062 5003 scope.go:117] "RemoveContainer" containerID="14e62a22aa945c458ae016769350cba7c01a838b85299573d468b2fa1cef07cd" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.038091 5003 scope.go:117] "RemoveContainer" containerID="242c4b5df0783c40de8c8d50308ef656e8bc43b2c49800e6e0f32a0c9d3b5548" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.061646 5003 scope.go:117] "RemoveContainer" containerID="88d289c8c29cef995cba467c3ef015fe579c965aa3a27e317ec0952f33134666" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.085437 5003 scope.go:117] "RemoveContainer" containerID="1265d9b3a3feebe021921e531bdb66eae7dd896fa15b34ab8198f0a228a053af" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.107402 5003 scope.go:117] "RemoveContainer" containerID="f8ead6df5ff9818293ee63a8f4549cf26db872cb3414f924dadf1e20d52850b6" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.127058 5003 scope.go:117] "RemoveContainer" containerID="4490066eb9bd6c66b9b0208990bd8d6ddb6b95928de76266ff83dabcfbf84d8e" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.147679 5003 
scope.go:117] "RemoveContainer" containerID="a64c4272241480892dfe3f2278ada32e293df4c15d3e4af914b9b432ee3fcac1" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.168717 5003 scope.go:117] "RemoveContainer" containerID="eae01b5370d1e228605d370f34df9fa397a7d68cf59e29307321708ca0b44d64" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.186709 5003 scope.go:117] "RemoveContainer" containerID="dd1fb65813942bf400b95e20d6ad2d0c12a6094c8721f5e09ce6ed2eca47bbd1" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.204762 5003 scope.go:117] "RemoveContainer" containerID="5fe0c6d4431b856fe4436d168ee7e1e5d4399d39c3a6c76037edb9e8ab19d93d" Jan 26 11:02:22 crc kubenswrapper[5003]: E0126 11:02:22.205368 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5fe0c6d4431b856fe4436d168ee7e1e5d4399d39c3a6c76037edb9e8ab19d93d\": container with ID starting with 5fe0c6d4431b856fe4436d168ee7e1e5d4399d39c3a6c76037edb9e8ab19d93d not found: ID does not exist" containerID="5fe0c6d4431b856fe4436d168ee7e1e5d4399d39c3a6c76037edb9e8ab19d93d" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.205434 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fe0c6d4431b856fe4436d168ee7e1e5d4399d39c3a6c76037edb9e8ab19d93d"} err="failed to get container status \"5fe0c6d4431b856fe4436d168ee7e1e5d4399d39c3a6c76037edb9e8ab19d93d\": rpc error: code = NotFound desc = could not find container \"5fe0c6d4431b856fe4436d168ee7e1e5d4399d39c3a6c76037edb9e8ab19d93d\": container with ID starting with 5fe0c6d4431b856fe4436d168ee7e1e5d4399d39c3a6c76037edb9e8ab19d93d not found: ID does not exist" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.205482 5003 scope.go:117] "RemoveContainer" containerID="1f052a09e3da171b6ed8c264f5462cebfaf9cc8cd6e4d643c4676c1c3c0f0bf3" Jan 26 11:02:22 crc kubenswrapper[5003]: E0126 11:02:22.206298 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f052a09e3da171b6ed8c264f5462cebfaf9cc8cd6e4d643c4676c1c3c0f0bf3\": container with ID starting with 1f052a09e3da171b6ed8c264f5462cebfaf9cc8cd6e4d643c4676c1c3c0f0bf3 not found: ID does not exist" containerID="1f052a09e3da171b6ed8c264f5462cebfaf9cc8cd6e4d643c4676c1c3c0f0bf3" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.206335 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f052a09e3da171b6ed8c264f5462cebfaf9cc8cd6e4d643c4676c1c3c0f0bf3"} err="failed to get container status \"1f052a09e3da171b6ed8c264f5462cebfaf9cc8cd6e4d643c4676c1c3c0f0bf3\": rpc error: code = NotFound desc = could not find container \"1f052a09e3da171b6ed8c264f5462cebfaf9cc8cd6e4d643c4676c1c3c0f0bf3\": container with ID starting with 1f052a09e3da171b6ed8c264f5462cebfaf9cc8cd6e4d643c4676c1c3c0f0bf3 not found: ID does not exist" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.206358 5003 scope.go:117] "RemoveContainer" containerID="aa8f9aadbc5e77757c5abbb131c7c069a35d27f56fe76f934fca1651e6618705" Jan 26 11:02:22 crc kubenswrapper[5003]: E0126 11:02:22.206657 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa8f9aadbc5e77757c5abbb131c7c069a35d27f56fe76f934fca1651e6618705\": container with ID starting with aa8f9aadbc5e77757c5abbb131c7c069a35d27f56fe76f934fca1651e6618705 not found: ID does not exist" 
containerID="aa8f9aadbc5e77757c5abbb131c7c069a35d27f56fe76f934fca1651e6618705" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.206692 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa8f9aadbc5e77757c5abbb131c7c069a35d27f56fe76f934fca1651e6618705"} err="failed to get container status \"aa8f9aadbc5e77757c5abbb131c7c069a35d27f56fe76f934fca1651e6618705\": rpc error: code = NotFound desc = could not find container \"aa8f9aadbc5e77757c5abbb131c7c069a35d27f56fe76f934fca1651e6618705\": container with ID starting with aa8f9aadbc5e77757c5abbb131c7c069a35d27f56fe76f934fca1651e6618705 not found: ID does not exist" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.206715 5003 scope.go:117] "RemoveContainer" containerID="26114010e05fab42d39367ece8f172ab8c229ebdd45980ebbe883834403c5680" Jan 26 11:02:22 crc kubenswrapper[5003]: E0126 11:02:22.206971 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"26114010e05fab42d39367ece8f172ab8c229ebdd45980ebbe883834403c5680\": container with ID starting with 26114010e05fab42d39367ece8f172ab8c229ebdd45980ebbe883834403c5680 not found: ID does not exist" containerID="26114010e05fab42d39367ece8f172ab8c229ebdd45980ebbe883834403c5680" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.206997 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26114010e05fab42d39367ece8f172ab8c229ebdd45980ebbe883834403c5680"} err="failed to get container status \"26114010e05fab42d39367ece8f172ab8c229ebdd45980ebbe883834403c5680\": rpc error: code = NotFound desc = could not find container \"26114010e05fab42d39367ece8f172ab8c229ebdd45980ebbe883834403c5680\": container with ID starting with 26114010e05fab42d39367ece8f172ab8c229ebdd45980ebbe883834403c5680 not found: ID does not exist" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.207012 5003 scope.go:117] "RemoveContainer" containerID="59cbbeca4e455551fd4d58e02cd043424dc43dbb708f812dc323404c1a202446" Jan 26 11:02:22 crc kubenswrapper[5003]: E0126 11:02:22.207303 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59cbbeca4e455551fd4d58e02cd043424dc43dbb708f812dc323404c1a202446\": container with ID starting with 59cbbeca4e455551fd4d58e02cd043424dc43dbb708f812dc323404c1a202446 not found: ID does not exist" containerID="59cbbeca4e455551fd4d58e02cd043424dc43dbb708f812dc323404c1a202446" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.207330 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59cbbeca4e455551fd4d58e02cd043424dc43dbb708f812dc323404c1a202446"} err="failed to get container status \"59cbbeca4e455551fd4d58e02cd043424dc43dbb708f812dc323404c1a202446\": rpc error: code = NotFound desc = could not find container \"59cbbeca4e455551fd4d58e02cd043424dc43dbb708f812dc323404c1a202446\": container with ID starting with 59cbbeca4e455551fd4d58e02cd043424dc43dbb708f812dc323404c1a202446 not found: ID does not exist" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.207344 5003 scope.go:117] "RemoveContainer" containerID="7b2534758d832bcd1a530711efe81788030cd894bec690d03b6191096a789a05" Jan 26 11:02:22 crc kubenswrapper[5003]: E0126 11:02:22.207620 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"7b2534758d832bcd1a530711efe81788030cd894bec690d03b6191096a789a05\": container with ID starting with 7b2534758d832bcd1a530711efe81788030cd894bec690d03b6191096a789a05 not found: ID does not exist" containerID="7b2534758d832bcd1a530711efe81788030cd894bec690d03b6191096a789a05" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.207659 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b2534758d832bcd1a530711efe81788030cd894bec690d03b6191096a789a05"} err="failed to get container status \"7b2534758d832bcd1a530711efe81788030cd894bec690d03b6191096a789a05\": rpc error: code = NotFound desc = could not find container \"7b2534758d832bcd1a530711efe81788030cd894bec690d03b6191096a789a05\": container with ID starting with 7b2534758d832bcd1a530711efe81788030cd894bec690d03b6191096a789a05 not found: ID does not exist" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.207677 5003 scope.go:117] "RemoveContainer" containerID="14e62a22aa945c458ae016769350cba7c01a838b85299573d468b2fa1cef07cd" Jan 26 11:02:22 crc kubenswrapper[5003]: E0126 11:02:22.208085 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14e62a22aa945c458ae016769350cba7c01a838b85299573d468b2fa1cef07cd\": container with ID starting with 14e62a22aa945c458ae016769350cba7c01a838b85299573d468b2fa1cef07cd not found: ID does not exist" containerID="14e62a22aa945c458ae016769350cba7c01a838b85299573d468b2fa1cef07cd" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.208109 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14e62a22aa945c458ae016769350cba7c01a838b85299573d468b2fa1cef07cd"} err="failed to get container status \"14e62a22aa945c458ae016769350cba7c01a838b85299573d468b2fa1cef07cd\": rpc error: code = NotFound desc = could not find container \"14e62a22aa945c458ae016769350cba7c01a838b85299573d468b2fa1cef07cd\": container with ID starting with 14e62a22aa945c458ae016769350cba7c01a838b85299573d468b2fa1cef07cd not found: ID does not exist" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.208124 5003 scope.go:117] "RemoveContainer" containerID="242c4b5df0783c40de8c8d50308ef656e8bc43b2c49800e6e0f32a0c9d3b5548" Jan 26 11:02:22 crc kubenswrapper[5003]: E0126 11:02:22.208518 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"242c4b5df0783c40de8c8d50308ef656e8bc43b2c49800e6e0f32a0c9d3b5548\": container with ID starting with 242c4b5df0783c40de8c8d50308ef656e8bc43b2c49800e6e0f32a0c9d3b5548 not found: ID does not exist" containerID="242c4b5df0783c40de8c8d50308ef656e8bc43b2c49800e6e0f32a0c9d3b5548" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.208549 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"242c4b5df0783c40de8c8d50308ef656e8bc43b2c49800e6e0f32a0c9d3b5548"} err="failed to get container status \"242c4b5df0783c40de8c8d50308ef656e8bc43b2c49800e6e0f32a0c9d3b5548\": rpc error: code = NotFound desc = could not find container \"242c4b5df0783c40de8c8d50308ef656e8bc43b2c49800e6e0f32a0c9d3b5548\": container with ID starting with 242c4b5df0783c40de8c8d50308ef656e8bc43b2c49800e6e0f32a0c9d3b5548 not found: ID does not exist" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.208568 5003 scope.go:117] "RemoveContainer" containerID="88d289c8c29cef995cba467c3ef015fe579c965aa3a27e317ec0952f33134666" Jan 26 11:02:22 crc 
kubenswrapper[5003]: E0126 11:02:22.209178 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88d289c8c29cef995cba467c3ef015fe579c965aa3a27e317ec0952f33134666\": container with ID starting with 88d289c8c29cef995cba467c3ef015fe579c965aa3a27e317ec0952f33134666 not found: ID does not exist" containerID="88d289c8c29cef995cba467c3ef015fe579c965aa3a27e317ec0952f33134666" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.209201 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88d289c8c29cef995cba467c3ef015fe579c965aa3a27e317ec0952f33134666"} err="failed to get container status \"88d289c8c29cef995cba467c3ef015fe579c965aa3a27e317ec0952f33134666\": rpc error: code = NotFound desc = could not find container \"88d289c8c29cef995cba467c3ef015fe579c965aa3a27e317ec0952f33134666\": container with ID starting with 88d289c8c29cef995cba467c3ef015fe579c965aa3a27e317ec0952f33134666 not found: ID does not exist" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.209224 5003 scope.go:117] "RemoveContainer" containerID="1265d9b3a3feebe021921e531bdb66eae7dd896fa15b34ab8198f0a228a053af" Jan 26 11:02:22 crc kubenswrapper[5003]: E0126 11:02:22.209517 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1265d9b3a3feebe021921e531bdb66eae7dd896fa15b34ab8198f0a228a053af\": container with ID starting with 1265d9b3a3feebe021921e531bdb66eae7dd896fa15b34ab8198f0a228a053af not found: ID does not exist" containerID="1265d9b3a3feebe021921e531bdb66eae7dd896fa15b34ab8198f0a228a053af" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.209549 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1265d9b3a3feebe021921e531bdb66eae7dd896fa15b34ab8198f0a228a053af"} err="failed to get container status \"1265d9b3a3feebe021921e531bdb66eae7dd896fa15b34ab8198f0a228a053af\": rpc error: code = NotFound desc = could not find container \"1265d9b3a3feebe021921e531bdb66eae7dd896fa15b34ab8198f0a228a053af\": container with ID starting with 1265d9b3a3feebe021921e531bdb66eae7dd896fa15b34ab8198f0a228a053af not found: ID does not exist" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.209566 5003 scope.go:117] "RemoveContainer" containerID="f8ead6df5ff9818293ee63a8f4549cf26db872cb3414f924dadf1e20d52850b6" Jan 26 11:02:22 crc kubenswrapper[5003]: E0126 11:02:22.209870 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8ead6df5ff9818293ee63a8f4549cf26db872cb3414f924dadf1e20d52850b6\": container with ID starting with f8ead6df5ff9818293ee63a8f4549cf26db872cb3414f924dadf1e20d52850b6 not found: ID does not exist" containerID="f8ead6df5ff9818293ee63a8f4549cf26db872cb3414f924dadf1e20d52850b6" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.209893 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8ead6df5ff9818293ee63a8f4549cf26db872cb3414f924dadf1e20d52850b6"} err="failed to get container status \"f8ead6df5ff9818293ee63a8f4549cf26db872cb3414f924dadf1e20d52850b6\": rpc error: code = NotFound desc = could not find container \"f8ead6df5ff9818293ee63a8f4549cf26db872cb3414f924dadf1e20d52850b6\": container with ID starting with f8ead6df5ff9818293ee63a8f4549cf26db872cb3414f924dadf1e20d52850b6 not found: ID does not exist" Jan 26 11:02:22 crc kubenswrapper[5003]: 
I0126 11:02:22.209908 5003 scope.go:117] "RemoveContainer" containerID="4490066eb9bd6c66b9b0208990bd8d6ddb6b95928de76266ff83dabcfbf84d8e" Jan 26 11:02:22 crc kubenswrapper[5003]: E0126 11:02:22.210130 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4490066eb9bd6c66b9b0208990bd8d6ddb6b95928de76266ff83dabcfbf84d8e\": container with ID starting with 4490066eb9bd6c66b9b0208990bd8d6ddb6b95928de76266ff83dabcfbf84d8e not found: ID does not exist" containerID="4490066eb9bd6c66b9b0208990bd8d6ddb6b95928de76266ff83dabcfbf84d8e" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.210154 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4490066eb9bd6c66b9b0208990bd8d6ddb6b95928de76266ff83dabcfbf84d8e"} err="failed to get container status \"4490066eb9bd6c66b9b0208990bd8d6ddb6b95928de76266ff83dabcfbf84d8e\": rpc error: code = NotFound desc = could not find container \"4490066eb9bd6c66b9b0208990bd8d6ddb6b95928de76266ff83dabcfbf84d8e\": container with ID starting with 4490066eb9bd6c66b9b0208990bd8d6ddb6b95928de76266ff83dabcfbf84d8e not found: ID does not exist" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.210170 5003 scope.go:117] "RemoveContainer" containerID="a64c4272241480892dfe3f2278ada32e293df4c15d3e4af914b9b432ee3fcac1" Jan 26 11:02:22 crc kubenswrapper[5003]: E0126 11:02:22.210522 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a64c4272241480892dfe3f2278ada32e293df4c15d3e4af914b9b432ee3fcac1\": container with ID starting with a64c4272241480892dfe3f2278ada32e293df4c15d3e4af914b9b432ee3fcac1 not found: ID does not exist" containerID="a64c4272241480892dfe3f2278ada32e293df4c15d3e4af914b9b432ee3fcac1" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.210556 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a64c4272241480892dfe3f2278ada32e293df4c15d3e4af914b9b432ee3fcac1"} err="failed to get container status \"a64c4272241480892dfe3f2278ada32e293df4c15d3e4af914b9b432ee3fcac1\": rpc error: code = NotFound desc = could not find container \"a64c4272241480892dfe3f2278ada32e293df4c15d3e4af914b9b432ee3fcac1\": container with ID starting with a64c4272241480892dfe3f2278ada32e293df4c15d3e4af914b9b432ee3fcac1 not found: ID does not exist" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.210576 5003 scope.go:117] "RemoveContainer" containerID="eae01b5370d1e228605d370f34df9fa397a7d68cf59e29307321708ca0b44d64" Jan 26 11:02:22 crc kubenswrapper[5003]: E0126 11:02:22.210955 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eae01b5370d1e228605d370f34df9fa397a7d68cf59e29307321708ca0b44d64\": container with ID starting with eae01b5370d1e228605d370f34df9fa397a7d68cf59e29307321708ca0b44d64 not found: ID does not exist" containerID="eae01b5370d1e228605d370f34df9fa397a7d68cf59e29307321708ca0b44d64" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.210981 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eae01b5370d1e228605d370f34df9fa397a7d68cf59e29307321708ca0b44d64"} err="failed to get container status \"eae01b5370d1e228605d370f34df9fa397a7d68cf59e29307321708ca0b44d64\": rpc error: code = NotFound desc = could not find container \"eae01b5370d1e228605d370f34df9fa397a7d68cf59e29307321708ca0b44d64\": container 
with ID starting with eae01b5370d1e228605d370f34df9fa397a7d68cf59e29307321708ca0b44d64 not found: ID does not exist" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.211002 5003 scope.go:117] "RemoveContainer" containerID="dd1fb65813942bf400b95e20d6ad2d0c12a6094c8721f5e09ce6ed2eca47bbd1" Jan 26 11:02:22 crc kubenswrapper[5003]: E0126 11:02:22.211420 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd1fb65813942bf400b95e20d6ad2d0c12a6094c8721f5e09ce6ed2eca47bbd1\": container with ID starting with dd1fb65813942bf400b95e20d6ad2d0c12a6094c8721f5e09ce6ed2eca47bbd1 not found: ID does not exist" containerID="dd1fb65813942bf400b95e20d6ad2d0c12a6094c8721f5e09ce6ed2eca47bbd1" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.211444 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd1fb65813942bf400b95e20d6ad2d0c12a6094c8721f5e09ce6ed2eca47bbd1"} err="failed to get container status \"dd1fb65813942bf400b95e20d6ad2d0c12a6094c8721f5e09ce6ed2eca47bbd1\": rpc error: code = NotFound desc = could not find container \"dd1fb65813942bf400b95e20d6ad2d0c12a6094c8721f5e09ce6ed2eca47bbd1\": container with ID starting with dd1fb65813942bf400b95e20d6ad2d0c12a6094c8721f5e09ce6ed2eca47bbd1 not found: ID does not exist" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.211457 5003 scope.go:117] "RemoveContainer" containerID="2467dcc8c0218a5a485c7ccfb590d97d4ca41ea8627a790c4e2320823b03ad2f" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.229649 5003 scope.go:117] "RemoveContainer" containerID="202233758ef7a1a236827861b8e588e8b5f8373c3d8da3ee47ca61590a088834" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.248493 5003 scope.go:117] "RemoveContainer" containerID="3b84daf1e1ebe868e5535ef06c08f90895842502ba9cb87c2e317de2e8a3b0a1" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.275005 5003 scope.go:117] "RemoveContainer" containerID="e587b56a1fee3e5498a7bf5cebf4a67b1d11d7fb571ba066fe4abbc70a333cd0" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.297178 5003 scope.go:117] "RemoveContainer" containerID="052b65a9db7b7f7ac1b5a0605df62902cad9ecdc3de5c5a5712bdb1cc615a4cf" Jan 26 11:02:22 crc kubenswrapper[5003]: I0126 11:02:22.313326 5003 scope.go:117] "RemoveContainer" containerID="4be5d77c647c6c4144f10a613250b882d016f92354befa8b49599944c17a919b" Jan 26 11:02:23 crc kubenswrapper[5003]: I0126 11:02:23.011195 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="980cff26-19b2-457a-a90f-b6acec8de879" path="/var/lib/kubelet/pods/980cff26-19b2-457a-a90f-b6acec8de879/volumes" Jan 26 11:02:23 crc kubenswrapper[5003]: I0126 11:02:23.013121 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" path="/var/lib/kubelet/pods/b1cd12b2-b1e1-4085-b627-52c229799294/volumes" Jan 26 11:02:23 crc kubenswrapper[5003]: I0126 11:02:23.014896 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" path="/var/lib/kubelet/pods/f16c3bea-96a9-4034-8848-de4d7bbcb6b5/volumes" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.693856 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.694716 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="object-updater" Jan 26 11:02:24 
crc kubenswrapper[5003]: I0126 11:02:24.694732 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="object-updater" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.694739 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="rsync" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.694745 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="rsync" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.694760 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="object-updater" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.694769 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="object-updater" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.694776 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="account-auditor" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.694783 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="account-auditor" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.694791 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="container-updater" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.694797 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="container-updater" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.694805 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="object-auditor" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.694811 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="object-auditor" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.694823 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="container-server" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.694828 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="container-server" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.694836 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad757f65-110b-4d88-a83d-d1997664923d" containerName="swift-ring-rebalance" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.694842 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad757f65-110b-4d88-a83d-d1997664923d" containerName="swift-ring-rebalance" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.694855 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="container-updater" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.694863 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="container-updater" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.694875 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="account-server" Jan 
26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.694881 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="account-server" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.694890 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="account-server" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.694898 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="account-server" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.694906 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a9437ea-d818-4371-907f-ef50e49fab62" containerName="proxy-httpd" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.694912 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a9437ea-d818-4371-907f-ef50e49fab62" containerName="proxy-httpd" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.694921 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="container-server" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.694927 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="container-server" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.694934 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="container-auditor" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.694941 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="container-auditor" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.694953 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="object-server" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.694959 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="object-server" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.694967 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="object-server" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.694973 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="object-server" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.694982 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="container-replicator" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.694989 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="container-replicator" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.694996 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="account-reaper" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695001 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="account-reaper" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.695010 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="980cff26-19b2-457a-a90f-b6acec8de879" 
containerName="container-updater" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695016 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="container-updater" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.695027 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="swift-recon-cron" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695032 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="swift-recon-cron" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.695042 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="rsync" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695048 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="rsync" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.695058 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="container-replicator" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695065 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="container-replicator" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.695072 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="object-expirer" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695077 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="object-expirer" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.695085 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a9437ea-d818-4371-907f-ef50e49fab62" containerName="proxy-server" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695091 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a9437ea-d818-4371-907f-ef50e49fab62" containerName="proxy-server" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.695102 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="account-reaper" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695109 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="account-reaper" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.695116 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="object-server" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695122 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="object-server" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.695132 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="object-updater" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695138 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="object-updater" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.695149 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" 
containerName="account-replicator" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695157 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="account-replicator" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.695165 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="container-auditor" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695170 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="container-auditor" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.695179 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="object-expirer" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695185 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="object-expirer" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.695193 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="object-auditor" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695198 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="object-auditor" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.695205 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="object-expirer" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695210 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="object-expirer" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.695219 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="account-server" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695226 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="account-server" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.695234 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="container-auditor" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695239 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="container-auditor" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.695248 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="account-replicator" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695255 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="account-replicator" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.695298 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="swift-recon-cron" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695304 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="swift-recon-cron" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.695314 5003 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="swift-recon-cron" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695319 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="swift-recon-cron" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.695326 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="object-replicator" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695332 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="object-replicator" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.695343 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="account-reaper" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695348 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="account-reaper" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.695358 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="object-auditor" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695364 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="object-auditor" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.695376 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="account-auditor" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695383 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="account-auditor" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.695393 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="container-server" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695399 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="container-server" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.695412 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="object-replicator" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695418 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="object-replicator" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.695427 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="account-auditor" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695434 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="account-auditor" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.695440 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="container-replicator" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695446 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="container-replicator" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.695453 5003 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="account-replicator" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695460 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="account-replicator" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.695470 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="rsync" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695475 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="rsync" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.695483 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="object-replicator" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695490 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="object-replicator" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695645 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="account-server" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695655 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="account-auditor" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695665 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="container-server" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695674 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="account-replicator" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695686 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="object-server" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695695 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="object-replicator" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695704 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="object-updater" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695713 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="container-replicator" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695720 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a9437ea-d818-4371-907f-ef50e49fab62" containerName="proxy-httpd" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695726 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="swift-recon-cron" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695735 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="object-expirer" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695743 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="object-server" Jan 26 11:02:24 crc 
kubenswrapper[5003]: I0126 11:02:24.695750 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="object-auditor" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695762 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="container-auditor" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695769 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a9437ea-d818-4371-907f-ef50e49fab62" containerName="proxy-server" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695777 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="account-server" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695787 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="object-replicator" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695797 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="container-replicator" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695806 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="account-reaper" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695815 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="account-auditor" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695826 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="account-replicator" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695834 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="object-updater" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695841 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="account-reaper" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695848 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="object-server" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695855 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="object-expirer" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695863 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="account-server" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695869 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="container-auditor" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695876 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="object-replicator" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695884 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="object-auditor" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695891 5003 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="account-reaper" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695899 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="rsync" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695907 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="container-server" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695914 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="rsync" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695922 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="container-auditor" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695930 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="container-updater" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695938 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="rsync" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695947 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="container-updater" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695954 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="container-updater" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695961 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="account-replicator" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695970 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="object-updater" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695979 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="container-replicator" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695987 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="object-auditor" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.695997 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1cd12b2-b1e1-4085-b627-52c229799294" containerName="swift-recon-cron" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.696007 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="object-expirer" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.696015 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="container-server" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.696025 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad757f65-110b-4d88-a83d-d1997664923d" containerName="swift-ring-rebalance" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.696034 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f16c3bea-96a9-4034-8848-de4d7bbcb6b5" containerName="account-auditor" Jan 26 11:02:24 crc 
kubenswrapper[5003]: I0126 11:02:24.696043 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="980cff26-19b2-457a-a90f-b6acec8de879" containerName="swift-recon-cron" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.701184 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.704379 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-storage-config-data" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.704811 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-swift-dockercfg-h84cx" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.705513 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-files" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.706041 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-conf" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.725063 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.774712 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/db324618-6b4b-44b1-b223-8f4cd5680226-cache\") pod \"swift-storage-0\" (UID: \"db324618-6b4b-44b1-b223-8f4cd5680226\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.774796 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"db324618-6b4b-44b1-b223-8f4cd5680226\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.774857 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcmzj\" (UniqueName: \"kubernetes.io/projected/db324618-6b4b-44b1-b223-8f4cd5680226-kube-api-access-tcmzj\") pod \"swift-storage-0\" (UID: \"db324618-6b4b-44b1-b223-8f4cd5680226\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.774885 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/db324618-6b4b-44b1-b223-8f4cd5680226-etc-swift\") pod \"swift-storage-0\" (UID: \"db324618-6b4b-44b1-b223-8f4cd5680226\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.774935 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/db324618-6b4b-44b1-b223-8f4cd5680226-lock\") pod \"swift-storage-0\" (UID: \"db324618-6b4b-44b1-b223-8f4cd5680226\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.875803 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/db324618-6b4b-44b1-b223-8f4cd5680226-cache\") pod \"swift-storage-0\" (UID: \"db324618-6b4b-44b1-b223-8f4cd5680226\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.875883 5003 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"db324618-6b4b-44b1-b223-8f4cd5680226\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.875944 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcmzj\" (UniqueName: \"kubernetes.io/projected/db324618-6b4b-44b1-b223-8f4cd5680226-kube-api-access-tcmzj\") pod \"swift-storage-0\" (UID: \"db324618-6b4b-44b1-b223-8f4cd5680226\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.875974 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/db324618-6b4b-44b1-b223-8f4cd5680226-etc-swift\") pod \"swift-storage-0\" (UID: \"db324618-6b4b-44b1-b223-8f4cd5680226\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.875996 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/db324618-6b4b-44b1-b223-8f4cd5680226-lock\") pod \"swift-storage-0\" (UID: \"db324618-6b4b-44b1-b223-8f4cd5680226\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.876655 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/db324618-6b4b-44b1-b223-8f4cd5680226-cache\") pod \"swift-storage-0\" (UID: \"db324618-6b4b-44b1-b223-8f4cd5680226\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.876734 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/db324618-6b4b-44b1-b223-8f4cd5680226-lock\") pod \"swift-storage-0\" (UID: \"db324618-6b4b-44b1-b223-8f4cd5680226\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.876895 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.876951 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 26 11:02:24 crc kubenswrapper[5003]: E0126 11:02:24.877050 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/db324618-6b4b-44b1-b223-8f4cd5680226-etc-swift podName:db324618-6b4b-44b1-b223-8f4cd5680226 nodeName:}" failed. No retries permitted until 2026-01-26 11:02:25.377012865 +0000 UTC m=+1160.918238426 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/db324618-6b4b-44b1-b223-8f4cd5680226-etc-swift") pod "swift-storage-0" (UID: "db324618-6b4b-44b1-b223-8f4cd5680226") : configmap "swift-ring-files" not found Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.877146 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"db324618-6b4b-44b1-b223-8f4cd5680226\") device mount path \"/mnt/openstack/pv12\"" pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.912052 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcmzj\" (UniqueName: \"kubernetes.io/projected/db324618-6b4b-44b1-b223-8f4cd5680226-kube-api-access-tcmzj\") pod \"swift-storage-0\" (UID: \"db324618-6b4b-44b1-b223-8f4cd5680226\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:02:24 crc kubenswrapper[5003]: I0126 11:02:24.913528 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"db324618-6b4b-44b1-b223-8f4cd5680226\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:02:25 crc kubenswrapper[5003]: I0126 11:02:25.385904 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/db324618-6b4b-44b1-b223-8f4cd5680226-etc-swift\") pod \"swift-storage-0\" (UID: \"db324618-6b4b-44b1-b223-8f4cd5680226\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:02:25 crc kubenswrapper[5003]: E0126 11:02:25.386066 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:02:25 crc kubenswrapper[5003]: E0126 11:02:25.386085 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 26 11:02:25 crc kubenswrapper[5003]: E0126 11:02:25.386134 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/db324618-6b4b-44b1-b223-8f4cd5680226-etc-swift podName:db324618-6b4b-44b1-b223-8f4cd5680226 nodeName:}" failed. No retries permitted until 2026-01-26 11:02:26.386120162 +0000 UTC m=+1161.927345723 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/db324618-6b4b-44b1-b223-8f4cd5680226-etc-swift") pod "swift-storage-0" (UID: "db324618-6b4b-44b1-b223-8f4cd5680226") : configmap "swift-ring-files" not found Jan 26 11:02:26 crc kubenswrapper[5003]: I0126 11:02:26.400666 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/db324618-6b4b-44b1-b223-8f4cd5680226-etc-swift\") pod \"swift-storage-0\" (UID: \"db324618-6b4b-44b1-b223-8f4cd5680226\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:02:26 crc kubenswrapper[5003]: E0126 11:02:26.400851 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:02:26 crc kubenswrapper[5003]: E0126 11:02:26.401103 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 26 11:02:26 crc kubenswrapper[5003]: E0126 11:02:26.401161 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/db324618-6b4b-44b1-b223-8f4cd5680226-etc-swift podName:db324618-6b4b-44b1-b223-8f4cd5680226 nodeName:}" failed. No retries permitted until 2026-01-26 11:02:28.401139847 +0000 UTC m=+1163.942365408 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/db324618-6b4b-44b1-b223-8f4cd5680226-etc-swift") pod "swift-storage-0" (UID: "db324618-6b4b-44b1-b223-8f4cd5680226") : configmap "swift-ring-files" not found Jan 26 11:02:28 crc kubenswrapper[5003]: I0126 11:02:28.431942 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/db324618-6b4b-44b1-b223-8f4cd5680226-etc-swift\") pod \"swift-storage-0\" (UID: \"db324618-6b4b-44b1-b223-8f4cd5680226\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:02:28 crc kubenswrapper[5003]: E0126 11:02:28.432138 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:02:28 crc kubenswrapper[5003]: E0126 11:02:28.432151 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 26 11:02:28 crc kubenswrapper[5003]: E0126 11:02:28.432204 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/db324618-6b4b-44b1-b223-8f4cd5680226-etc-swift podName:db324618-6b4b-44b1-b223-8f4cd5680226 nodeName:}" failed. No retries permitted until 2026-01-26 11:02:32.432189715 +0000 UTC m=+1167.973415266 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/db324618-6b4b-44b1-b223-8f4cd5680226-etc-swift") pod "swift-storage-0" (UID: "db324618-6b4b-44b1-b223-8f4cd5680226") : configmap "swift-ring-files" not found Jan 26 11:02:28 crc kubenswrapper[5003]: I0126 11:02:28.619083 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-x6z4g"] Jan 26 11:02:28 crc kubenswrapper[5003]: I0126 11:02:28.620061 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-x6z4g" Jan 26 11:02:28 crc kubenswrapper[5003]: I0126 11:02:28.628165 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-scripts" Jan 26 11:02:28 crc kubenswrapper[5003]: I0126 11:02:28.628423 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-proxy-config-data" Jan 26 11:02:28 crc kubenswrapper[5003]: I0126 11:02:28.628482 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-config-data" Jan 26 11:02:28 crc kubenswrapper[5003]: I0126 11:02:28.629051 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-x6z4g"] Jan 26 11:02:28 crc kubenswrapper[5003]: I0126 11:02:28.739106 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-ring-data-devices\") pod \"swift-ring-rebalance-x6z4g\" (UID: \"2e4c83f1-0d2b-4389-b5fd-d3497db222d2\") " pod="swift-kuttl-tests/swift-ring-rebalance-x6z4g" Jan 26 11:02:28 crc kubenswrapper[5003]: I0126 11:02:28.739336 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-scripts\") pod \"swift-ring-rebalance-x6z4g\" (UID: \"2e4c83f1-0d2b-4389-b5fd-d3497db222d2\") " pod="swift-kuttl-tests/swift-ring-rebalance-x6z4g" Jan 26 11:02:28 crc kubenswrapper[5003]: I0126 11:02:28.739401 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57q4w\" (UniqueName: \"kubernetes.io/projected/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-kube-api-access-57q4w\") pod \"swift-ring-rebalance-x6z4g\" (UID: \"2e4c83f1-0d2b-4389-b5fd-d3497db222d2\") " pod="swift-kuttl-tests/swift-ring-rebalance-x6z4g" Jan 26 11:02:28 crc kubenswrapper[5003]: I0126 11:02:28.739527 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-swiftconf\") pod \"swift-ring-rebalance-x6z4g\" (UID: \"2e4c83f1-0d2b-4389-b5fd-d3497db222d2\") " pod="swift-kuttl-tests/swift-ring-rebalance-x6z4g" Jan 26 11:02:28 crc kubenswrapper[5003]: I0126 11:02:28.739562 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-etc-swift\") pod \"swift-ring-rebalance-x6z4g\" (UID: \"2e4c83f1-0d2b-4389-b5fd-d3497db222d2\") " pod="swift-kuttl-tests/swift-ring-rebalance-x6z4g" Jan 26 11:02:28 crc kubenswrapper[5003]: I0126 11:02:28.739652 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-dispersionconf\") pod \"swift-ring-rebalance-x6z4g\" (UID: \"2e4c83f1-0d2b-4389-b5fd-d3497db222d2\") " pod="swift-kuttl-tests/swift-ring-rebalance-x6z4g" Jan 26 11:02:28 crc kubenswrapper[5003]: I0126 11:02:28.840879 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-scripts\") pod \"swift-ring-rebalance-x6z4g\" (UID: \"2e4c83f1-0d2b-4389-b5fd-d3497db222d2\") " 
pod="swift-kuttl-tests/swift-ring-rebalance-x6z4g" Jan 26 11:02:28 crc kubenswrapper[5003]: I0126 11:02:28.840974 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57q4w\" (UniqueName: \"kubernetes.io/projected/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-kube-api-access-57q4w\") pod \"swift-ring-rebalance-x6z4g\" (UID: \"2e4c83f1-0d2b-4389-b5fd-d3497db222d2\") " pod="swift-kuttl-tests/swift-ring-rebalance-x6z4g" Jan 26 11:02:28 crc kubenswrapper[5003]: I0126 11:02:28.841057 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-swiftconf\") pod \"swift-ring-rebalance-x6z4g\" (UID: \"2e4c83f1-0d2b-4389-b5fd-d3497db222d2\") " pod="swift-kuttl-tests/swift-ring-rebalance-x6z4g" Jan 26 11:02:28 crc kubenswrapper[5003]: I0126 11:02:28.841093 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-etc-swift\") pod \"swift-ring-rebalance-x6z4g\" (UID: \"2e4c83f1-0d2b-4389-b5fd-d3497db222d2\") " pod="swift-kuttl-tests/swift-ring-rebalance-x6z4g" Jan 26 11:02:28 crc kubenswrapper[5003]: I0126 11:02:28.841148 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-dispersionconf\") pod \"swift-ring-rebalance-x6z4g\" (UID: \"2e4c83f1-0d2b-4389-b5fd-d3497db222d2\") " pod="swift-kuttl-tests/swift-ring-rebalance-x6z4g" Jan 26 11:02:28 crc kubenswrapper[5003]: I0126 11:02:28.841201 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-ring-data-devices\") pod \"swift-ring-rebalance-x6z4g\" (UID: \"2e4c83f1-0d2b-4389-b5fd-d3497db222d2\") " pod="swift-kuttl-tests/swift-ring-rebalance-x6z4g" Jan 26 11:02:28 crc kubenswrapper[5003]: I0126 11:02:28.841946 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-etc-swift\") pod \"swift-ring-rebalance-x6z4g\" (UID: \"2e4c83f1-0d2b-4389-b5fd-d3497db222d2\") " pod="swift-kuttl-tests/swift-ring-rebalance-x6z4g" Jan 26 11:02:28 crc kubenswrapper[5003]: I0126 11:02:28.842520 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-ring-data-devices\") pod \"swift-ring-rebalance-x6z4g\" (UID: \"2e4c83f1-0d2b-4389-b5fd-d3497db222d2\") " pod="swift-kuttl-tests/swift-ring-rebalance-x6z4g" Jan 26 11:02:28 crc kubenswrapper[5003]: I0126 11:02:28.843023 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-scripts\") pod \"swift-ring-rebalance-x6z4g\" (UID: \"2e4c83f1-0d2b-4389-b5fd-d3497db222d2\") " pod="swift-kuttl-tests/swift-ring-rebalance-x6z4g" Jan 26 11:02:28 crc kubenswrapper[5003]: I0126 11:02:28.850748 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-swiftconf\") pod \"swift-ring-rebalance-x6z4g\" (UID: \"2e4c83f1-0d2b-4389-b5fd-d3497db222d2\") " pod="swift-kuttl-tests/swift-ring-rebalance-x6z4g" Jan 26 11:02:28 crc 
kubenswrapper[5003]: I0126 11:02:28.851575 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-dispersionconf\") pod \"swift-ring-rebalance-x6z4g\" (UID: \"2e4c83f1-0d2b-4389-b5fd-d3497db222d2\") " pod="swift-kuttl-tests/swift-ring-rebalance-x6z4g" Jan 26 11:02:28 crc kubenswrapper[5003]: I0126 11:02:28.862695 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57q4w\" (UniqueName: \"kubernetes.io/projected/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-kube-api-access-57q4w\") pod \"swift-ring-rebalance-x6z4g\" (UID: \"2e4c83f1-0d2b-4389-b5fd-d3497db222d2\") " pod="swift-kuttl-tests/swift-ring-rebalance-x6z4g" Jan 26 11:02:28 crc kubenswrapper[5003]: I0126 11:02:28.946178 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-x6z4g" Jan 26 11:02:29 crc kubenswrapper[5003]: I0126 11:02:29.392462 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-x6z4g"] Jan 26 11:02:29 crc kubenswrapper[5003]: I0126 11:02:29.613458 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-x6z4g" event={"ID":"2e4c83f1-0d2b-4389-b5fd-d3497db222d2","Type":"ContainerStarted","Data":"0b6eafaec6846be26830003b61490a92979a844d0b75d77b7f332c30f2639a87"} Jan 26 11:02:30 crc kubenswrapper[5003]: I0126 11:02:30.624697 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-x6z4g" event={"ID":"2e4c83f1-0d2b-4389-b5fd-d3497db222d2","Type":"ContainerStarted","Data":"f259088594911367b024408b1590172a888f97a734e2e2d25a371ac7a2317752"} Jan 26 11:02:30 crc kubenswrapper[5003]: I0126 11:02:30.672119 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-ring-rebalance-x6z4g" podStartSLOduration=2.672095664 podStartE2EDuration="2.672095664s" podCreationTimestamp="2026-01-26 11:02:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 11:02:30.66584439 +0000 UTC m=+1166.207069981" watchObservedRunningTime="2026-01-26 11:02:30.672095664 +0000 UTC m=+1166.213321245" Jan 26 11:02:32 crc kubenswrapper[5003]: I0126 11:02:32.514000 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/db324618-6b4b-44b1-b223-8f4cd5680226-etc-swift\") pod \"swift-storage-0\" (UID: \"db324618-6b4b-44b1-b223-8f4cd5680226\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:02:32 crc kubenswrapper[5003]: E0126 11:02:32.514338 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:02:32 crc kubenswrapper[5003]: E0126 11:02:32.514387 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 26 11:02:32 crc kubenswrapper[5003]: E0126 11:02:32.514479 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/db324618-6b4b-44b1-b223-8f4cd5680226-etc-swift podName:db324618-6b4b-44b1-b223-8f4cd5680226 nodeName:}" failed. No retries permitted until 2026-01-26 11:02:40.514449475 +0000 UTC m=+1176.055675026 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/db324618-6b4b-44b1-b223-8f4cd5680226-etc-swift") pod "swift-storage-0" (UID: "db324618-6b4b-44b1-b223-8f4cd5680226") : configmap "swift-ring-files" not found Jan 26 11:02:37 crc kubenswrapper[5003]: I0126 11:02:37.680380 5003 generic.go:334] "Generic (PLEG): container finished" podID="2e4c83f1-0d2b-4389-b5fd-d3497db222d2" containerID="f259088594911367b024408b1590172a888f97a734e2e2d25a371ac7a2317752" exitCode=0 Jan 26 11:02:37 crc kubenswrapper[5003]: I0126 11:02:37.680483 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-x6z4g" event={"ID":"2e4c83f1-0d2b-4389-b5fd-d3497db222d2","Type":"ContainerDied","Data":"f259088594911367b024408b1590172a888f97a734e2e2d25a371ac7a2317752"} Jan 26 11:02:39 crc kubenswrapper[5003]: I0126 11:02:39.011128 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-x6z4g" Jan 26 11:02:39 crc kubenswrapper[5003]: I0126 11:02:39.039835 5003 patch_prober.go:28] interesting pod/machine-config-daemon-m84kp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 11:02:39 crc kubenswrapper[5003]: I0126 11:02:39.039917 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 11:02:39 crc kubenswrapper[5003]: I0126 11:02:39.039978 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" Jan 26 11:02:39 crc kubenswrapper[5003]: I0126 11:02:39.040658 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e95ef3044b7da7897332a1c0dc0a352de84ea5dd8273e8eb61313248ed95c0df"} pod="openshift-machine-config-operator/machine-config-daemon-m84kp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 26 11:02:39 crc kubenswrapper[5003]: I0126 11:02:39.040776 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" containerID="cri-o://e95ef3044b7da7897332a1c0dc0a352de84ea5dd8273e8eb61313248ed95c0df" gracePeriod=600 Jan 26 11:02:39 crc kubenswrapper[5003]: I0126 11:02:39.135820 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-dispersionconf\") pod \"2e4c83f1-0d2b-4389-b5fd-d3497db222d2\" (UID: \"2e4c83f1-0d2b-4389-b5fd-d3497db222d2\") " Jan 26 11:02:39 crc kubenswrapper[5003]: I0126 11:02:39.135983 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-scripts\") pod \"2e4c83f1-0d2b-4389-b5fd-d3497db222d2\" (UID: \"2e4c83f1-0d2b-4389-b5fd-d3497db222d2\") " Jan 26 11:02:39 crc kubenswrapper[5003]: I0126 
11:02:39.136036 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-ring-data-devices\") pod \"2e4c83f1-0d2b-4389-b5fd-d3497db222d2\" (UID: \"2e4c83f1-0d2b-4389-b5fd-d3497db222d2\") " Jan 26 11:02:39 crc kubenswrapper[5003]: I0126 11:02:39.136079 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-etc-swift\") pod \"2e4c83f1-0d2b-4389-b5fd-d3497db222d2\" (UID: \"2e4c83f1-0d2b-4389-b5fd-d3497db222d2\") " Jan 26 11:02:39 crc kubenswrapper[5003]: I0126 11:02:39.136199 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-swiftconf\") pod \"2e4c83f1-0d2b-4389-b5fd-d3497db222d2\" (UID: \"2e4c83f1-0d2b-4389-b5fd-d3497db222d2\") " Jan 26 11:02:39 crc kubenswrapper[5003]: I0126 11:02:39.136230 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-57q4w\" (UniqueName: \"kubernetes.io/projected/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-kube-api-access-57q4w\") pod \"2e4c83f1-0d2b-4389-b5fd-d3497db222d2\" (UID: \"2e4c83f1-0d2b-4389-b5fd-d3497db222d2\") " Jan 26 11:02:39 crc kubenswrapper[5003]: I0126 11:02:39.140472 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "2e4c83f1-0d2b-4389-b5fd-d3497db222d2" (UID: "2e4c83f1-0d2b-4389-b5fd-d3497db222d2"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 11:02:39 crc kubenswrapper[5003]: I0126 11:02:39.140874 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "2e4c83f1-0d2b-4389-b5fd-d3497db222d2" (UID: "2e4c83f1-0d2b-4389-b5fd-d3497db222d2"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:02:39 crc kubenswrapper[5003]: I0126 11:02:39.146435 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-kube-api-access-57q4w" (OuterVolumeSpecName: "kube-api-access-57q4w") pod "2e4c83f1-0d2b-4389-b5fd-d3497db222d2" (UID: "2e4c83f1-0d2b-4389-b5fd-d3497db222d2"). InnerVolumeSpecName "kube-api-access-57q4w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:02:39 crc kubenswrapper[5003]: I0126 11:02:39.160329 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "2e4c83f1-0d2b-4389-b5fd-d3497db222d2" (UID: "2e4c83f1-0d2b-4389-b5fd-d3497db222d2"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:02:39 crc kubenswrapper[5003]: I0126 11:02:39.165190 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-scripts" (OuterVolumeSpecName: "scripts") pod "2e4c83f1-0d2b-4389-b5fd-d3497db222d2" (UID: "2e4c83f1-0d2b-4389-b5fd-d3497db222d2"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 11:02:39 crc kubenswrapper[5003]: I0126 11:02:39.171334 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "2e4c83f1-0d2b-4389-b5fd-d3497db222d2" (UID: "2e4c83f1-0d2b-4389-b5fd-d3497db222d2"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:02:39 crc kubenswrapper[5003]: I0126 11:02:39.238137 5003 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 26 11:02:39 crc kubenswrapper[5003]: I0126 11:02:39.238570 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 26 11:02:39 crc kubenswrapper[5003]: I0126 11:02:39.238582 5003 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 26 11:02:39 crc kubenswrapper[5003]: I0126 11:02:39.238595 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-57q4w\" (UniqueName: \"kubernetes.io/projected/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-kube-api-access-57q4w\") on node \"crc\" DevicePath \"\"" Jan 26 11:02:39 crc kubenswrapper[5003]: I0126 11:02:39.238610 5003 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 26 11:02:39 crc kubenswrapper[5003]: I0126 11:02:39.238620 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2e4c83f1-0d2b-4389-b5fd-d3497db222d2-scripts\") on node \"crc\" DevicePath \"\"" Jan 26 11:02:39 crc kubenswrapper[5003]: I0126 11:02:39.701443 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-x6z4g" event={"ID":"2e4c83f1-0d2b-4389-b5fd-d3497db222d2","Type":"ContainerDied","Data":"0b6eafaec6846be26830003b61490a92979a844d0b75d77b7f332c30f2639a87"} Jan 26 11:02:39 crc kubenswrapper[5003]: I0126 11:02:39.701829 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0b6eafaec6846be26830003b61490a92979a844d0b75d77b7f332c30f2639a87" Jan 26 11:02:39 crc kubenswrapper[5003]: I0126 11:02:39.701908 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-x6z4g" Jan 26 11:02:39 crc kubenswrapper[5003]: I0126 11:02:39.711177 5003 generic.go:334] "Generic (PLEG): container finished" podID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerID="e95ef3044b7da7897332a1c0dc0a352de84ea5dd8273e8eb61313248ed95c0df" exitCode=0 Jan 26 11:02:39 crc kubenswrapper[5003]: I0126 11:02:39.711256 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" event={"ID":"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd","Type":"ContainerDied","Data":"e95ef3044b7da7897332a1c0dc0a352de84ea5dd8273e8eb61313248ed95c0df"} Jan 26 11:02:39 crc kubenswrapper[5003]: I0126 11:02:39.711404 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" event={"ID":"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd","Type":"ContainerStarted","Data":"d611de2469cbe98c2fe1bc7ea60af3e72e8a66e47fcfb0fbfee926d96efd43c1"} Jan 26 11:02:39 crc kubenswrapper[5003]: I0126 11:02:39.711428 5003 scope.go:117] "RemoveContainer" containerID="f3eab31ad2a64d16b429c7fff6c1ada069433f73eabf4567b3026431fe989a0c" Jan 26 11:02:40 crc kubenswrapper[5003]: I0126 11:02:40.561181 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/db324618-6b4b-44b1-b223-8f4cd5680226-etc-swift\") pod \"swift-storage-0\" (UID: \"db324618-6b4b-44b1-b223-8f4cd5680226\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:02:40 crc kubenswrapper[5003]: I0126 11:02:40.574078 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/db324618-6b4b-44b1-b223-8f4cd5680226-etc-swift\") pod \"swift-storage-0\" (UID: \"db324618-6b4b-44b1-b223-8f4cd5680226\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:02:40 crc kubenswrapper[5003]: I0126 11:02:40.622939 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:02:41 crc kubenswrapper[5003]: I0126 11:02:41.076354 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Jan 26 11:02:41 crc kubenswrapper[5003]: W0126 11:02:41.080722 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb324618_6b4b_44b1_b223_8f4cd5680226.slice/crio-1cf301e58efeec002ee983f45bddd5a566b3ce240df06d3e52721207a789a1c4 WatchSource:0}: Error finding container 1cf301e58efeec002ee983f45bddd5a566b3ce240df06d3e52721207a789a1c4: Status 404 returned error can't find the container with id 1cf301e58efeec002ee983f45bddd5a566b3ce240df06d3e52721207a789a1c4 Jan 26 11:02:41 crc kubenswrapper[5003]: I0126 11:02:41.740730 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerStarted","Data":"1680262baddb5d668d5409f050342fd890de576a0c7625f25e3742c2835b5a76"} Jan 26 11:02:41 crc kubenswrapper[5003]: I0126 11:02:41.741714 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerStarted","Data":"9fb4b2da57c09b06d963087a7b2d5350926dcd7da35b278befbe31ad552a754d"} Jan 26 11:02:41 crc kubenswrapper[5003]: I0126 11:02:41.741737 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerStarted","Data":"1cf301e58efeec002ee983f45bddd5a566b3ce240df06d3e52721207a789a1c4"} Jan 26 11:02:42 crc kubenswrapper[5003]: I0126 11:02:42.783829 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerStarted","Data":"f0622d6558a77138f1fdd4b2e3211e40e8eb3e66c3cea67d04d895f11a220da4"} Jan 26 11:02:42 crc kubenswrapper[5003]: I0126 11:02:42.783931 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerStarted","Data":"d9aecb65386c41feca3abf2a5fbe57a0f986fa234a670e526653809cf34c4cf8"} Jan 26 11:02:42 crc kubenswrapper[5003]: I0126 11:02:42.783947 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerStarted","Data":"420ff3f87b1107618aa2357f6d84d429f41c0f858c95fcda74efdc511fe0e7e6"} Jan 26 11:02:42 crc kubenswrapper[5003]: I0126 11:02:42.783958 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerStarted","Data":"a34d33467d6f6d242a3b707751dbe7354c20189edff14d262a5864925e808499"} Jan 26 11:02:42 crc kubenswrapper[5003]: I0126 11:02:42.783969 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerStarted","Data":"160183d14ed2611dea2b016a758359c9df8c3dcf8342ded4c5f95f3ed1bf904e"} Jan 26 11:02:42 crc kubenswrapper[5003]: I0126 11:02:42.783979 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerStarted","Data":"c689e9c623cc0252896cf6beb81033e8123e65657279b75f6c34bce37d87bd85"} 
Jan 26 11:02:43 crc kubenswrapper[5003]: I0126 11:02:43.801870 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerStarted","Data":"e207a9e19bd5fae5cf1e7b106b156be05674e02e02f4823d67a39c93dbd0b58f"} Jan 26 11:02:43 crc kubenswrapper[5003]: I0126 11:02:43.802453 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerStarted","Data":"5b7afa0645c36e901a5e91815cf8f17bbb22db46aceff23a3f3aa9556fb0c630"} Jan 26 11:02:43 crc kubenswrapper[5003]: I0126 11:02:43.802468 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerStarted","Data":"fdad5e8e97120339d10d7af6547b7674530431afef576d08a34a038300b82aa9"} Jan 26 11:02:43 crc kubenswrapper[5003]: I0126 11:02:43.802479 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerStarted","Data":"a7c3cb6d1fbe60a5bea5b9905a6d82ad2fbaaad5f9c9d0f66fd4e1983046fc2b"} Jan 26 11:02:43 crc kubenswrapper[5003]: I0126 11:02:43.802491 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerStarted","Data":"67eb5409482dd789330246caef477b0b42a7703be6d48d5458e12acc8a4f91da"} Jan 26 11:02:43 crc kubenswrapper[5003]: I0126 11:02:43.802504 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerStarted","Data":"509aef8b430af4383a56bfdb0a66e724ad5728388c4150824d200fe4b5f3e870"} Jan 26 11:02:44 crc kubenswrapper[5003]: I0126 11:02:44.821404 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerStarted","Data":"3cde556799c2921d1b5f4dd05c1e984d283e7ac4afefd6fa610f04937d39a084"} Jan 26 11:02:44 crc kubenswrapper[5003]: I0126 11:02:44.821941 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerStarted","Data":"1e66bacd66c1713cb8ddb28b000373a5af6a89908aff178280a3238996ef4c02"} Jan 26 11:02:44 crc kubenswrapper[5003]: I0126 11:02:44.860957 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-storage-0" podStartSLOduration=21.860938912 podStartE2EDuration="21.860938912s" podCreationTimestamp="2026-01-26 11:02:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 11:02:44.859051009 +0000 UTC m=+1180.400276570" watchObservedRunningTime="2026-01-26 11:02:44.860938912 +0000 UTC m=+1180.402164473" Jan 26 11:02:57 crc kubenswrapper[5003]: I0126 11:02:57.689732 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv"] Jan 26 11:02:57 crc kubenswrapper[5003]: E0126 11:02:57.690761 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e4c83f1-0d2b-4389-b5fd-d3497db222d2" containerName="swift-ring-rebalance" Jan 26 11:02:57 crc kubenswrapper[5003]: I0126 11:02:57.690779 5003 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="2e4c83f1-0d2b-4389-b5fd-d3497db222d2" containerName="swift-ring-rebalance" Jan 26 11:02:57 crc kubenswrapper[5003]: I0126 11:02:57.690982 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e4c83f1-0d2b-4389-b5fd-d3497db222d2" containerName="swift-ring-rebalance" Jan 26 11:02:57 crc kubenswrapper[5003]: I0126 11:02:57.691859 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv" Jan 26 11:02:57 crc kubenswrapper[5003]: I0126 11:02:57.696136 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-proxy-config-data" Jan 26 11:02:57 crc kubenswrapper[5003]: I0126 11:02:57.709983 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv"] Jan 26 11:02:57 crc kubenswrapper[5003]: I0126 11:02:57.778904 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9c1aa18c-b3aa-4ab3-a941-5a45632ee873-run-httpd\") pod \"swift-proxy-59d59699f5-g8jvv\" (UID: \"9c1aa18c-b3aa-4ab3-a941-5a45632ee873\") " pod="swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv" Jan 26 11:02:57 crc kubenswrapper[5003]: I0126 11:02:57.779029 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c1aa18c-b3aa-4ab3-a941-5a45632ee873-config-data\") pod \"swift-proxy-59d59699f5-g8jvv\" (UID: \"9c1aa18c-b3aa-4ab3-a941-5a45632ee873\") " pod="swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv" Jan 26 11:02:57 crc kubenswrapper[5003]: I0126 11:02:57.779073 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/9c1aa18c-b3aa-4ab3-a941-5a45632ee873-etc-swift\") pod \"swift-proxy-59d59699f5-g8jvv\" (UID: \"9c1aa18c-b3aa-4ab3-a941-5a45632ee873\") " pod="swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv" Jan 26 11:02:57 crc kubenswrapper[5003]: I0126 11:02:57.779099 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9c1aa18c-b3aa-4ab3-a941-5a45632ee873-log-httpd\") pod \"swift-proxy-59d59699f5-g8jvv\" (UID: \"9c1aa18c-b3aa-4ab3-a941-5a45632ee873\") " pod="swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv" Jan 26 11:02:57 crc kubenswrapper[5003]: I0126 11:02:57.779134 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqzdm\" (UniqueName: \"kubernetes.io/projected/9c1aa18c-b3aa-4ab3-a941-5a45632ee873-kube-api-access-sqzdm\") pod \"swift-proxy-59d59699f5-g8jvv\" (UID: \"9c1aa18c-b3aa-4ab3-a941-5a45632ee873\") " pod="swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv" Jan 26 11:02:57 crc kubenswrapper[5003]: I0126 11:02:57.880475 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c1aa18c-b3aa-4ab3-a941-5a45632ee873-config-data\") pod \"swift-proxy-59d59699f5-g8jvv\" (UID: \"9c1aa18c-b3aa-4ab3-a941-5a45632ee873\") " pod="swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv" Jan 26 11:02:57 crc kubenswrapper[5003]: I0126 11:02:57.880543 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/9c1aa18c-b3aa-4ab3-a941-5a45632ee873-etc-swift\") pod 
\"swift-proxy-59d59699f5-g8jvv\" (UID: \"9c1aa18c-b3aa-4ab3-a941-5a45632ee873\") " pod="swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv" Jan 26 11:02:57 crc kubenswrapper[5003]: I0126 11:02:57.880572 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9c1aa18c-b3aa-4ab3-a941-5a45632ee873-log-httpd\") pod \"swift-proxy-59d59699f5-g8jvv\" (UID: \"9c1aa18c-b3aa-4ab3-a941-5a45632ee873\") " pod="swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv" Jan 26 11:02:57 crc kubenswrapper[5003]: I0126 11:02:57.880597 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sqzdm\" (UniqueName: \"kubernetes.io/projected/9c1aa18c-b3aa-4ab3-a941-5a45632ee873-kube-api-access-sqzdm\") pod \"swift-proxy-59d59699f5-g8jvv\" (UID: \"9c1aa18c-b3aa-4ab3-a941-5a45632ee873\") " pod="swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv" Jan 26 11:02:57 crc kubenswrapper[5003]: I0126 11:02:57.880629 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9c1aa18c-b3aa-4ab3-a941-5a45632ee873-run-httpd\") pod \"swift-proxy-59d59699f5-g8jvv\" (UID: \"9c1aa18c-b3aa-4ab3-a941-5a45632ee873\") " pod="swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv" Jan 26 11:02:57 crc kubenswrapper[5003]: I0126 11:02:57.881356 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9c1aa18c-b3aa-4ab3-a941-5a45632ee873-run-httpd\") pod \"swift-proxy-59d59699f5-g8jvv\" (UID: \"9c1aa18c-b3aa-4ab3-a941-5a45632ee873\") " pod="swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv" Jan 26 11:02:57 crc kubenswrapper[5003]: I0126 11:02:57.881572 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9c1aa18c-b3aa-4ab3-a941-5a45632ee873-log-httpd\") pod \"swift-proxy-59d59699f5-g8jvv\" (UID: \"9c1aa18c-b3aa-4ab3-a941-5a45632ee873\") " pod="swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv" Jan 26 11:02:57 crc kubenswrapper[5003]: I0126 11:02:57.887999 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/9c1aa18c-b3aa-4ab3-a941-5a45632ee873-etc-swift\") pod \"swift-proxy-59d59699f5-g8jvv\" (UID: \"9c1aa18c-b3aa-4ab3-a941-5a45632ee873\") " pod="swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv" Jan 26 11:02:57 crc kubenswrapper[5003]: I0126 11:02:57.888859 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c1aa18c-b3aa-4ab3-a941-5a45632ee873-config-data\") pod \"swift-proxy-59d59699f5-g8jvv\" (UID: \"9c1aa18c-b3aa-4ab3-a941-5a45632ee873\") " pod="swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv" Jan 26 11:02:57 crc kubenswrapper[5003]: I0126 11:02:57.905481 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqzdm\" (UniqueName: \"kubernetes.io/projected/9c1aa18c-b3aa-4ab3-a941-5a45632ee873-kube-api-access-sqzdm\") pod \"swift-proxy-59d59699f5-g8jvv\" (UID: \"9c1aa18c-b3aa-4ab3-a941-5a45632ee873\") " pod="swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv" Jan 26 11:02:58 crc kubenswrapper[5003]: I0126 11:02:58.020517 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv" Jan 26 11:02:58 crc kubenswrapper[5003]: I0126 11:02:58.534240 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv"] Jan 26 11:02:58 crc kubenswrapper[5003]: W0126 11:02:58.544806 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9c1aa18c_b3aa_4ab3_a941_5a45632ee873.slice/crio-ed80dfd860e47b7162b8a3c0d40e4189cfc6212f10878e4cfa85ba973d2f4d75 WatchSource:0}: Error finding container ed80dfd860e47b7162b8a3c0d40e4189cfc6212f10878e4cfa85ba973d2f4d75: Status 404 returned error can't find the container with id ed80dfd860e47b7162b8a3c0d40e4189cfc6212f10878e4cfa85ba973d2f4d75 Jan 26 11:02:58 crc kubenswrapper[5003]: I0126 11:02:58.966581 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv" event={"ID":"9c1aa18c-b3aa-4ab3-a941-5a45632ee873","Type":"ContainerStarted","Data":"fb6119ecedd35cef11c530cefdd43e105fc3f9289d0e7fd60bc2eecb9bd27a5f"} Jan 26 11:02:58 crc kubenswrapper[5003]: I0126 11:02:58.967177 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv" Jan 26 11:02:58 crc kubenswrapper[5003]: I0126 11:02:58.967217 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv" event={"ID":"9c1aa18c-b3aa-4ab3-a941-5a45632ee873","Type":"ContainerStarted","Data":"7d24ccd5d3d9eddeaf1d9df0e4b819ad90c3bcc235e709d7616ae089e7eafad9"} Jan 26 11:02:58 crc kubenswrapper[5003]: I0126 11:02:58.967230 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv" Jan 26 11:02:58 crc kubenswrapper[5003]: I0126 11:02:58.967245 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv" event={"ID":"9c1aa18c-b3aa-4ab3-a941-5a45632ee873","Type":"ContainerStarted","Data":"ed80dfd860e47b7162b8a3c0d40e4189cfc6212f10878e4cfa85ba973d2f4d75"} Jan 26 11:02:58 crc kubenswrapper[5003]: I0126 11:02:58.998199 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv" podStartSLOduration=1.998180142 podStartE2EDuration="1.998180142s" podCreationTimestamp="2026-01-26 11:02:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 11:02:58.991840645 +0000 UTC m=+1194.533066216" watchObservedRunningTime="2026-01-26 11:02:58.998180142 +0000 UTC m=+1194.539405713" Jan 26 11:03:03 crc kubenswrapper[5003]: I0126 11:03:03.027131 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv" Jan 26 11:03:03 crc kubenswrapper[5003]: I0126 11:03:03.027763 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv" Jan 26 11:03:05 crc kubenswrapper[5003]: I0126 11:03:05.051509 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-v7v2t"] Jan 26 11:03:05 crc kubenswrapper[5003]: I0126 11:03:05.053407 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-v7v2t" Jan 26 11:03:05 crc kubenswrapper[5003]: I0126 11:03:05.057273 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-config-data" Jan 26 11:03:05 crc kubenswrapper[5003]: I0126 11:03:05.057729 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-scripts" Jan 26 11:03:05 crc kubenswrapper[5003]: I0126 11:03:05.066500 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-v7v2t"] Jan 26 11:03:05 crc kubenswrapper[5003]: I0126 11:03:05.212510 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/048f021e-b1b2-4992-8aef-245a47320246-dispersionconf\") pod \"swift-ring-rebalance-debug-v7v2t\" (UID: \"048f021e-b1b2-4992-8aef-245a47320246\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-v7v2t" Jan 26 11:03:05 crc kubenswrapper[5003]: I0126 11:03:05.212594 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwwkd\" (UniqueName: \"kubernetes.io/projected/048f021e-b1b2-4992-8aef-245a47320246-kube-api-access-xwwkd\") pod \"swift-ring-rebalance-debug-v7v2t\" (UID: \"048f021e-b1b2-4992-8aef-245a47320246\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-v7v2t" Jan 26 11:03:05 crc kubenswrapper[5003]: I0126 11:03:05.212755 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/048f021e-b1b2-4992-8aef-245a47320246-ring-data-devices\") pod \"swift-ring-rebalance-debug-v7v2t\" (UID: \"048f021e-b1b2-4992-8aef-245a47320246\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-v7v2t" Jan 26 11:03:05 crc kubenswrapper[5003]: I0126 11:03:05.212831 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/048f021e-b1b2-4992-8aef-245a47320246-scripts\") pod \"swift-ring-rebalance-debug-v7v2t\" (UID: \"048f021e-b1b2-4992-8aef-245a47320246\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-v7v2t" Jan 26 11:03:05 crc kubenswrapper[5003]: I0126 11:03:05.212877 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/048f021e-b1b2-4992-8aef-245a47320246-etc-swift\") pod \"swift-ring-rebalance-debug-v7v2t\" (UID: \"048f021e-b1b2-4992-8aef-245a47320246\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-v7v2t" Jan 26 11:03:05 crc kubenswrapper[5003]: I0126 11:03:05.213204 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/048f021e-b1b2-4992-8aef-245a47320246-swiftconf\") pod \"swift-ring-rebalance-debug-v7v2t\" (UID: \"048f021e-b1b2-4992-8aef-245a47320246\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-v7v2t" Jan 26 11:03:05 crc kubenswrapper[5003]: I0126 11:03:05.314345 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/048f021e-b1b2-4992-8aef-245a47320246-dispersionconf\") pod \"swift-ring-rebalance-debug-v7v2t\" (UID: \"048f021e-b1b2-4992-8aef-245a47320246\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-v7v2t" Jan 26 11:03:05 
crc kubenswrapper[5003]: I0126 11:03:05.314416 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwwkd\" (UniqueName: \"kubernetes.io/projected/048f021e-b1b2-4992-8aef-245a47320246-kube-api-access-xwwkd\") pod \"swift-ring-rebalance-debug-v7v2t\" (UID: \"048f021e-b1b2-4992-8aef-245a47320246\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-v7v2t" Jan 26 11:03:05 crc kubenswrapper[5003]: I0126 11:03:05.314448 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/048f021e-b1b2-4992-8aef-245a47320246-ring-data-devices\") pod \"swift-ring-rebalance-debug-v7v2t\" (UID: \"048f021e-b1b2-4992-8aef-245a47320246\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-v7v2t" Jan 26 11:03:05 crc kubenswrapper[5003]: I0126 11:03:05.314471 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/048f021e-b1b2-4992-8aef-245a47320246-scripts\") pod \"swift-ring-rebalance-debug-v7v2t\" (UID: \"048f021e-b1b2-4992-8aef-245a47320246\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-v7v2t" Jan 26 11:03:05 crc kubenswrapper[5003]: I0126 11:03:05.314487 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/048f021e-b1b2-4992-8aef-245a47320246-etc-swift\") pod \"swift-ring-rebalance-debug-v7v2t\" (UID: \"048f021e-b1b2-4992-8aef-245a47320246\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-v7v2t" Jan 26 11:03:05 crc kubenswrapper[5003]: I0126 11:03:05.314541 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/048f021e-b1b2-4992-8aef-245a47320246-swiftconf\") pod \"swift-ring-rebalance-debug-v7v2t\" (UID: \"048f021e-b1b2-4992-8aef-245a47320246\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-v7v2t" Jan 26 11:03:05 crc kubenswrapper[5003]: I0126 11:03:05.315315 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/048f021e-b1b2-4992-8aef-245a47320246-etc-swift\") pod \"swift-ring-rebalance-debug-v7v2t\" (UID: \"048f021e-b1b2-4992-8aef-245a47320246\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-v7v2t" Jan 26 11:03:05 crc kubenswrapper[5003]: I0126 11:03:05.315722 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/048f021e-b1b2-4992-8aef-245a47320246-ring-data-devices\") pod \"swift-ring-rebalance-debug-v7v2t\" (UID: \"048f021e-b1b2-4992-8aef-245a47320246\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-v7v2t" Jan 26 11:03:05 crc kubenswrapper[5003]: I0126 11:03:05.315755 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/048f021e-b1b2-4992-8aef-245a47320246-scripts\") pod \"swift-ring-rebalance-debug-v7v2t\" (UID: \"048f021e-b1b2-4992-8aef-245a47320246\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-v7v2t" Jan 26 11:03:05 crc kubenswrapper[5003]: I0126 11:03:05.321033 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/048f021e-b1b2-4992-8aef-245a47320246-dispersionconf\") pod \"swift-ring-rebalance-debug-v7v2t\" (UID: \"048f021e-b1b2-4992-8aef-245a47320246\") " 
pod="swift-kuttl-tests/swift-ring-rebalance-debug-v7v2t" Jan 26 11:03:05 crc kubenswrapper[5003]: I0126 11:03:05.321608 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/048f021e-b1b2-4992-8aef-245a47320246-swiftconf\") pod \"swift-ring-rebalance-debug-v7v2t\" (UID: \"048f021e-b1b2-4992-8aef-245a47320246\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-v7v2t" Jan 26 11:03:05 crc kubenswrapper[5003]: I0126 11:03:05.336031 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwwkd\" (UniqueName: \"kubernetes.io/projected/048f021e-b1b2-4992-8aef-245a47320246-kube-api-access-xwwkd\") pod \"swift-ring-rebalance-debug-v7v2t\" (UID: \"048f021e-b1b2-4992-8aef-245a47320246\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-v7v2t" Jan 26 11:03:05 crc kubenswrapper[5003]: I0126 11:03:05.381409 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-v7v2t" Jan 26 11:03:05 crc kubenswrapper[5003]: I0126 11:03:05.853948 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-v7v2t"] Jan 26 11:03:05 crc kubenswrapper[5003]: W0126 11:03:05.863584 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod048f021e_b1b2_4992_8aef_245a47320246.slice/crio-2a7f9f730d77ba6290d511f73a538ee11dba425c24bf624df381b6d6b9402fc2 WatchSource:0}: Error finding container 2a7f9f730d77ba6290d511f73a538ee11dba425c24bf624df381b6d6b9402fc2: Status 404 returned error can't find the container with id 2a7f9f730d77ba6290d511f73a538ee11dba425c24bf624df381b6d6b9402fc2 Jan 26 11:03:06 crc kubenswrapper[5003]: I0126 11:03:06.036896 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-v7v2t" event={"ID":"048f021e-b1b2-4992-8aef-245a47320246","Type":"ContainerStarted","Data":"2a7f9f730d77ba6290d511f73a538ee11dba425c24bf624df381b6d6b9402fc2"} Jan 26 11:03:07 crc kubenswrapper[5003]: I0126 11:03:07.045208 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-v7v2t" event={"ID":"048f021e-b1b2-4992-8aef-245a47320246","Type":"ContainerStarted","Data":"3ac3518c4c126fc7fa98a7ede2eb51a92817c80423ad562c4acfcfc4b5bcfbce"} Jan 26 11:03:07 crc kubenswrapper[5003]: I0126 11:03:07.071454 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-ring-rebalance-debug-v7v2t" podStartSLOduration=2.071430907 podStartE2EDuration="2.071430907s" podCreationTimestamp="2026-01-26 11:03:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 11:03:07.063734963 +0000 UTC m=+1202.604960564" watchObservedRunningTime="2026-01-26 11:03:07.071430907 +0000 UTC m=+1202.612656468" Jan 26 11:03:09 crc kubenswrapper[5003]: I0126 11:03:09.069908 5003 generic.go:334] "Generic (PLEG): container finished" podID="048f021e-b1b2-4992-8aef-245a47320246" containerID="3ac3518c4c126fc7fa98a7ede2eb51a92817c80423ad562c4acfcfc4b5bcfbce" exitCode=0 Jan 26 11:03:09 crc kubenswrapper[5003]: I0126 11:03:09.070019 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-v7v2t" 
event={"ID":"048f021e-b1b2-4992-8aef-245a47320246","Type":"ContainerDied","Data":"3ac3518c4c126fc7fa98a7ede2eb51a92817c80423ad562c4acfcfc4b5bcfbce"} Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.380189 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-v7v2t" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.447437 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-v7v2t"] Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.455869 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-v7v2t"] Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.503744 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/048f021e-b1b2-4992-8aef-245a47320246-swiftconf\") pod \"048f021e-b1b2-4992-8aef-245a47320246\" (UID: \"048f021e-b1b2-4992-8aef-245a47320246\") " Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.503808 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/048f021e-b1b2-4992-8aef-245a47320246-ring-data-devices\") pod \"048f021e-b1b2-4992-8aef-245a47320246\" (UID: \"048f021e-b1b2-4992-8aef-245a47320246\") " Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.503858 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/048f021e-b1b2-4992-8aef-245a47320246-dispersionconf\") pod \"048f021e-b1b2-4992-8aef-245a47320246\" (UID: \"048f021e-b1b2-4992-8aef-245a47320246\") " Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.503919 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/048f021e-b1b2-4992-8aef-245a47320246-etc-swift\") pod \"048f021e-b1b2-4992-8aef-245a47320246\" (UID: \"048f021e-b1b2-4992-8aef-245a47320246\") " Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.503942 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xwwkd\" (UniqueName: \"kubernetes.io/projected/048f021e-b1b2-4992-8aef-245a47320246-kube-api-access-xwwkd\") pod \"048f021e-b1b2-4992-8aef-245a47320246\" (UID: \"048f021e-b1b2-4992-8aef-245a47320246\") " Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.504024 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/048f021e-b1b2-4992-8aef-245a47320246-scripts\") pod \"048f021e-b1b2-4992-8aef-245a47320246\" (UID: \"048f021e-b1b2-4992-8aef-245a47320246\") " Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.505504 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/048f021e-b1b2-4992-8aef-245a47320246-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "048f021e-b1b2-4992-8aef-245a47320246" (UID: "048f021e-b1b2-4992-8aef-245a47320246"). InnerVolumeSpecName "ring-data-devices". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.505626 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/048f021e-b1b2-4992-8aef-245a47320246-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "048f021e-b1b2-4992-8aef-245a47320246" (UID: "048f021e-b1b2-4992-8aef-245a47320246"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.514812 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/048f021e-b1b2-4992-8aef-245a47320246-kube-api-access-xwwkd" (OuterVolumeSpecName: "kube-api-access-xwwkd") pod "048f021e-b1b2-4992-8aef-245a47320246" (UID: "048f021e-b1b2-4992-8aef-245a47320246"). InnerVolumeSpecName "kube-api-access-xwwkd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.529789 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/048f021e-b1b2-4992-8aef-245a47320246-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "048f021e-b1b2-4992-8aef-245a47320246" (UID: "048f021e-b1b2-4992-8aef-245a47320246"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.534558 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/048f021e-b1b2-4992-8aef-245a47320246-scripts" (OuterVolumeSpecName: "scripts") pod "048f021e-b1b2-4992-8aef-245a47320246" (UID: "048f021e-b1b2-4992-8aef-245a47320246"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.544254 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/048f021e-b1b2-4992-8aef-245a47320246-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "048f021e-b1b2-4992-8aef-245a47320246" (UID: "048f021e-b1b2-4992-8aef-245a47320246"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.590796 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-g9wn8"] Jan 26 11:03:10 crc kubenswrapper[5003]: E0126 11:03:10.591344 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="048f021e-b1b2-4992-8aef-245a47320246" containerName="swift-ring-rebalance" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.591385 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="048f021e-b1b2-4992-8aef-245a47320246" containerName="swift-ring-rebalance" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.591769 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="048f021e-b1b2-4992-8aef-245a47320246" containerName="swift-ring-rebalance" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.592720 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-g9wn8" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.601113 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-g9wn8"] Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.605734 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/048f021e-b1b2-4992-8aef-245a47320246-scripts\") on node \"crc\" DevicePath \"\"" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.605784 5003 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/048f021e-b1b2-4992-8aef-245a47320246-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.605806 5003 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/048f021e-b1b2-4992-8aef-245a47320246-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.605842 5003 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/048f021e-b1b2-4992-8aef-245a47320246-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.605863 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/048f021e-b1b2-4992-8aef-245a47320246-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.605880 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xwwkd\" (UniqueName: \"kubernetes.io/projected/048f021e-b1b2-4992-8aef-245a47320246-kube-api-access-xwwkd\") on node \"crc\" DevicePath \"\"" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.706864 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/6410d724-ffca-4d87-a719-b87f5210623a-ring-data-devices\") pod \"swift-ring-rebalance-debug-g9wn8\" (UID: \"6410d724-ffca-4d87-a719-b87f5210623a\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-g9wn8" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.706971 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/6410d724-ffca-4d87-a719-b87f5210623a-dispersionconf\") pod \"swift-ring-rebalance-debug-g9wn8\" (UID: \"6410d724-ffca-4d87-a719-b87f5210623a\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-g9wn8" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.707036 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/6410d724-ffca-4d87-a719-b87f5210623a-etc-swift\") pod \"swift-ring-rebalance-debug-g9wn8\" (UID: \"6410d724-ffca-4d87-a719-b87f5210623a\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-g9wn8" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.707157 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6410d724-ffca-4d87-a719-b87f5210623a-scripts\") pod \"swift-ring-rebalance-debug-g9wn8\" (UID: \"6410d724-ffca-4d87-a719-b87f5210623a\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-g9wn8" Jan 26 11:03:10 crc 
kubenswrapper[5003]: I0126 11:03:10.707204 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/6410d724-ffca-4d87-a719-b87f5210623a-swiftconf\") pod \"swift-ring-rebalance-debug-g9wn8\" (UID: \"6410d724-ffca-4d87-a719-b87f5210623a\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-g9wn8" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.707234 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqf45\" (UniqueName: \"kubernetes.io/projected/6410d724-ffca-4d87-a719-b87f5210623a-kube-api-access-rqf45\") pod \"swift-ring-rebalance-debug-g9wn8\" (UID: \"6410d724-ffca-4d87-a719-b87f5210623a\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-g9wn8" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.810357 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/6410d724-ffca-4d87-a719-b87f5210623a-etc-swift\") pod \"swift-ring-rebalance-debug-g9wn8\" (UID: \"6410d724-ffca-4d87-a719-b87f5210623a\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-g9wn8" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.810470 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6410d724-ffca-4d87-a719-b87f5210623a-scripts\") pod \"swift-ring-rebalance-debug-g9wn8\" (UID: \"6410d724-ffca-4d87-a719-b87f5210623a\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-g9wn8" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.810500 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/6410d724-ffca-4d87-a719-b87f5210623a-swiftconf\") pod \"swift-ring-rebalance-debug-g9wn8\" (UID: \"6410d724-ffca-4d87-a719-b87f5210623a\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-g9wn8" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.810536 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqf45\" (UniqueName: \"kubernetes.io/projected/6410d724-ffca-4d87-a719-b87f5210623a-kube-api-access-rqf45\") pod \"swift-ring-rebalance-debug-g9wn8\" (UID: \"6410d724-ffca-4d87-a719-b87f5210623a\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-g9wn8" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.810610 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/6410d724-ffca-4d87-a719-b87f5210623a-ring-data-devices\") pod \"swift-ring-rebalance-debug-g9wn8\" (UID: \"6410d724-ffca-4d87-a719-b87f5210623a\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-g9wn8" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.810656 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/6410d724-ffca-4d87-a719-b87f5210623a-dispersionconf\") pod \"swift-ring-rebalance-debug-g9wn8\" (UID: \"6410d724-ffca-4d87-a719-b87f5210623a\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-g9wn8" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.811118 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/6410d724-ffca-4d87-a719-b87f5210623a-etc-swift\") pod \"swift-ring-rebalance-debug-g9wn8\" (UID: 
\"6410d724-ffca-4d87-a719-b87f5210623a\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-g9wn8" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.811942 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/6410d724-ffca-4d87-a719-b87f5210623a-ring-data-devices\") pod \"swift-ring-rebalance-debug-g9wn8\" (UID: \"6410d724-ffca-4d87-a719-b87f5210623a\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-g9wn8" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.812242 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6410d724-ffca-4d87-a719-b87f5210623a-scripts\") pod \"swift-ring-rebalance-debug-g9wn8\" (UID: \"6410d724-ffca-4d87-a719-b87f5210623a\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-g9wn8" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.817207 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/6410d724-ffca-4d87-a719-b87f5210623a-dispersionconf\") pod \"swift-ring-rebalance-debug-g9wn8\" (UID: \"6410d724-ffca-4d87-a719-b87f5210623a\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-g9wn8" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.817205 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/6410d724-ffca-4d87-a719-b87f5210623a-swiftconf\") pod \"swift-ring-rebalance-debug-g9wn8\" (UID: \"6410d724-ffca-4d87-a719-b87f5210623a\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-g9wn8" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.844161 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqf45\" (UniqueName: \"kubernetes.io/projected/6410d724-ffca-4d87-a719-b87f5210623a-kube-api-access-rqf45\") pod \"swift-ring-rebalance-debug-g9wn8\" (UID: \"6410d724-ffca-4d87-a719-b87f5210623a\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-g9wn8" Jan 26 11:03:10 crc kubenswrapper[5003]: I0126 11:03:10.910409 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-g9wn8" Jan 26 11:03:11 crc kubenswrapper[5003]: I0126 11:03:11.012830 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="048f021e-b1b2-4992-8aef-245a47320246" path="/var/lib/kubelet/pods/048f021e-b1b2-4992-8aef-245a47320246/volumes" Jan 26 11:03:11 crc kubenswrapper[5003]: I0126 11:03:11.093348 5003 scope.go:117] "RemoveContainer" containerID="3ac3518c4c126fc7fa98a7ede2eb51a92817c80423ad562c4acfcfc4b5bcfbce" Jan 26 11:03:11 crc kubenswrapper[5003]: I0126 11:03:11.093386 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-v7v2t" Jan 26 11:03:11 crc kubenswrapper[5003]: I0126 11:03:11.409930 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-g9wn8"] Jan 26 11:03:12 crc kubenswrapper[5003]: I0126 11:03:12.105956 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-g9wn8" event={"ID":"6410d724-ffca-4d87-a719-b87f5210623a","Type":"ContainerStarted","Data":"5dd9e7734cb77fc5bc4446e59d4ca97b938850a3b28ef7eaa13c7b81dfc7111a"} Jan 26 11:03:12 crc kubenswrapper[5003]: I0126 11:03:12.106958 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-g9wn8" event={"ID":"6410d724-ffca-4d87-a719-b87f5210623a","Type":"ContainerStarted","Data":"9f402f3b0c458410cf42bc9fd6b34a1ecda68fce3c455b235cc338e82f65ef79"} Jan 26 11:03:12 crc kubenswrapper[5003]: I0126 11:03:12.143589 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-ring-rebalance-debug-g9wn8" podStartSLOduration=2.143561552 podStartE2EDuration="2.143561552s" podCreationTimestamp="2026-01-26 11:03:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 11:03:12.13558845 +0000 UTC m=+1207.676814021" watchObservedRunningTime="2026-01-26 11:03:12.143561552 +0000 UTC m=+1207.684787113" Jan 26 11:03:14 crc kubenswrapper[5003]: I0126 11:03:14.127556 5003 generic.go:334] "Generic (PLEG): container finished" podID="6410d724-ffca-4d87-a719-b87f5210623a" containerID="5dd9e7734cb77fc5bc4446e59d4ca97b938850a3b28ef7eaa13c7b81dfc7111a" exitCode=0 Jan 26 11:03:14 crc kubenswrapper[5003]: I0126 11:03:14.127705 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-g9wn8" event={"ID":"6410d724-ffca-4d87-a719-b87f5210623a","Type":"ContainerDied","Data":"5dd9e7734cb77fc5bc4446e59d4ca97b938850a3b28ef7eaa13c7b81dfc7111a"} Jan 26 11:03:15 crc kubenswrapper[5003]: I0126 11:03:15.471563 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-g9wn8" Jan 26 11:03:15 crc kubenswrapper[5003]: I0126 11:03:15.529250 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-g9wn8"] Jan 26 11:03:15 crc kubenswrapper[5003]: I0126 11:03:15.536847 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-g9wn8"] Jan 26 11:03:15 crc kubenswrapper[5003]: I0126 11:03:15.608136 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/6410d724-ffca-4d87-a719-b87f5210623a-etc-swift\") pod \"6410d724-ffca-4d87-a719-b87f5210623a\" (UID: \"6410d724-ffca-4d87-a719-b87f5210623a\") " Jan 26 11:03:15 crc kubenswrapper[5003]: I0126 11:03:15.608220 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/6410d724-ffca-4d87-a719-b87f5210623a-swiftconf\") pod \"6410d724-ffca-4d87-a719-b87f5210623a\" (UID: \"6410d724-ffca-4d87-a719-b87f5210623a\") " Jan 26 11:03:15 crc kubenswrapper[5003]: I0126 11:03:15.608352 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/6410d724-ffca-4d87-a719-b87f5210623a-ring-data-devices\") pod \"6410d724-ffca-4d87-a719-b87f5210623a\" (UID: \"6410d724-ffca-4d87-a719-b87f5210623a\") " Jan 26 11:03:15 crc kubenswrapper[5003]: I0126 11:03:15.608424 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rqf45\" (UniqueName: \"kubernetes.io/projected/6410d724-ffca-4d87-a719-b87f5210623a-kube-api-access-rqf45\") pod \"6410d724-ffca-4d87-a719-b87f5210623a\" (UID: \"6410d724-ffca-4d87-a719-b87f5210623a\") " Jan 26 11:03:15 crc kubenswrapper[5003]: I0126 11:03:15.608727 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6410d724-ffca-4d87-a719-b87f5210623a-scripts\") pod \"6410d724-ffca-4d87-a719-b87f5210623a\" (UID: \"6410d724-ffca-4d87-a719-b87f5210623a\") " Jan 26 11:03:15 crc kubenswrapper[5003]: I0126 11:03:15.608953 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6410d724-ffca-4d87-a719-b87f5210623a-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "6410d724-ffca-4d87-a719-b87f5210623a" (UID: "6410d724-ffca-4d87-a719-b87f5210623a"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:03:15 crc kubenswrapper[5003]: I0126 11:03:15.609451 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6410d724-ffca-4d87-a719-b87f5210623a-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "6410d724-ffca-4d87-a719-b87f5210623a" (UID: "6410d724-ffca-4d87-a719-b87f5210623a"). InnerVolumeSpecName "ring-data-devices". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 11:03:15 crc kubenswrapper[5003]: I0126 11:03:15.609567 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/6410d724-ffca-4d87-a719-b87f5210623a-dispersionconf\") pod \"6410d724-ffca-4d87-a719-b87f5210623a\" (UID: \"6410d724-ffca-4d87-a719-b87f5210623a\") " Jan 26 11:03:15 crc kubenswrapper[5003]: I0126 11:03:15.610663 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/6410d724-ffca-4d87-a719-b87f5210623a-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 26 11:03:15 crc kubenswrapper[5003]: I0126 11:03:15.611120 5003 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/6410d724-ffca-4d87-a719-b87f5210623a-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 26 11:03:15 crc kubenswrapper[5003]: I0126 11:03:15.621182 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6410d724-ffca-4d87-a719-b87f5210623a-kube-api-access-rqf45" (OuterVolumeSpecName: "kube-api-access-rqf45") pod "6410d724-ffca-4d87-a719-b87f5210623a" (UID: "6410d724-ffca-4d87-a719-b87f5210623a"). InnerVolumeSpecName "kube-api-access-rqf45". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:03:15 crc kubenswrapper[5003]: I0126 11:03:15.635572 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6410d724-ffca-4d87-a719-b87f5210623a-scripts" (OuterVolumeSpecName: "scripts") pod "6410d724-ffca-4d87-a719-b87f5210623a" (UID: "6410d724-ffca-4d87-a719-b87f5210623a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 11:03:15 crc kubenswrapper[5003]: I0126 11:03:15.636055 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6410d724-ffca-4d87-a719-b87f5210623a-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "6410d724-ffca-4d87-a719-b87f5210623a" (UID: "6410d724-ffca-4d87-a719-b87f5210623a"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:03:15 crc kubenswrapper[5003]: I0126 11:03:15.645209 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6410d724-ffca-4d87-a719-b87f5210623a-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "6410d724-ffca-4d87-a719-b87f5210623a" (UID: "6410d724-ffca-4d87-a719-b87f5210623a"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:03:15 crc kubenswrapper[5003]: I0126 11:03:15.712114 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rqf45\" (UniqueName: \"kubernetes.io/projected/6410d724-ffca-4d87-a719-b87f5210623a-kube-api-access-rqf45\") on node \"crc\" DevicePath \"\"" Jan 26 11:03:15 crc kubenswrapper[5003]: I0126 11:03:15.712162 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6410d724-ffca-4d87-a719-b87f5210623a-scripts\") on node \"crc\" DevicePath \"\"" Jan 26 11:03:15 crc kubenswrapper[5003]: I0126 11:03:15.712178 5003 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/6410d724-ffca-4d87-a719-b87f5210623a-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 26 11:03:15 crc kubenswrapper[5003]: I0126 11:03:15.712191 5003 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/6410d724-ffca-4d87-a719-b87f5210623a-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 26 11:03:16 crc kubenswrapper[5003]: I0126 11:03:16.148708 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9f402f3b0c458410cf42bc9fd6b34a1ecda68fce3c455b235cc338e82f65ef79" Jan 26 11:03:16 crc kubenswrapper[5003]: I0126 11:03:16.148850 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-g9wn8" Jan 26 11:03:16 crc kubenswrapper[5003]: E0126 11:03:16.236742 5003 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.192:39192->38.102.83.192:40567: write tcp 38.102.83.192:39192->38.102.83.192:40567: write: broken pipe Jan 26 11:03:17 crc kubenswrapper[5003]: I0126 11:03:17.017146 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6410d724-ffca-4d87-a719-b87f5210623a" path="/var/lib/kubelet/pods/6410d724-ffca-4d87-a719-b87f5210623a/volumes" Jan 26 11:03:17 crc kubenswrapper[5003]: I0126 11:03:17.946580 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-mlfn5"] Jan 26 11:03:17 crc kubenswrapper[5003]: E0126 11:03:17.947475 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6410d724-ffca-4d87-a719-b87f5210623a" containerName="swift-ring-rebalance" Jan 26 11:03:17 crc kubenswrapper[5003]: I0126 11:03:17.947497 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="6410d724-ffca-4d87-a719-b87f5210623a" containerName="swift-ring-rebalance" Jan 26 11:03:17 crc kubenswrapper[5003]: I0126 11:03:17.947680 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="6410d724-ffca-4d87-a719-b87f5210623a" containerName="swift-ring-rebalance" Jan 26 11:03:17 crc kubenswrapper[5003]: I0126 11:03:17.948413 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-mlfn5" Jan 26 11:03:17 crc kubenswrapper[5003]: I0126 11:03:17.955309 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-config-data" Jan 26 11:03:17 crc kubenswrapper[5003]: I0126 11:03:17.955344 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-scripts" Jan 26 11:03:17 crc kubenswrapper[5003]: I0126 11:03:17.958773 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-mlfn5"] Jan 26 11:03:18 crc kubenswrapper[5003]: I0126 11:03:18.051091 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/99cb33ba-ebb2-422f-974a-43d02bdca002-ring-data-devices\") pod \"swift-ring-rebalance-debug-mlfn5\" (UID: \"99cb33ba-ebb2-422f-974a-43d02bdca002\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-mlfn5" Jan 26 11:03:18 crc kubenswrapper[5003]: I0126 11:03:18.051191 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/99cb33ba-ebb2-422f-974a-43d02bdca002-swiftconf\") pod \"swift-ring-rebalance-debug-mlfn5\" (UID: \"99cb33ba-ebb2-422f-974a-43d02bdca002\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-mlfn5" Jan 26 11:03:18 crc kubenswrapper[5003]: I0126 11:03:18.051225 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqs7x\" (UniqueName: \"kubernetes.io/projected/99cb33ba-ebb2-422f-974a-43d02bdca002-kube-api-access-hqs7x\") pod \"swift-ring-rebalance-debug-mlfn5\" (UID: \"99cb33ba-ebb2-422f-974a-43d02bdca002\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-mlfn5" Jan 26 11:03:18 crc kubenswrapper[5003]: I0126 11:03:18.051260 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/99cb33ba-ebb2-422f-974a-43d02bdca002-etc-swift\") pod \"swift-ring-rebalance-debug-mlfn5\" (UID: \"99cb33ba-ebb2-422f-974a-43d02bdca002\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-mlfn5" Jan 26 11:03:18 crc kubenswrapper[5003]: I0126 11:03:18.051329 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/99cb33ba-ebb2-422f-974a-43d02bdca002-scripts\") pod \"swift-ring-rebalance-debug-mlfn5\" (UID: \"99cb33ba-ebb2-422f-974a-43d02bdca002\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-mlfn5" Jan 26 11:03:18 crc kubenswrapper[5003]: I0126 11:03:18.051352 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/99cb33ba-ebb2-422f-974a-43d02bdca002-dispersionconf\") pod \"swift-ring-rebalance-debug-mlfn5\" (UID: \"99cb33ba-ebb2-422f-974a-43d02bdca002\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-mlfn5" Jan 26 11:03:18 crc kubenswrapper[5003]: I0126 11:03:18.152552 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/99cb33ba-ebb2-422f-974a-43d02bdca002-swiftconf\") pod \"swift-ring-rebalance-debug-mlfn5\" (UID: \"99cb33ba-ebb2-422f-974a-43d02bdca002\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-mlfn5" Jan 26 11:03:18 crc 
kubenswrapper[5003]: I0126 11:03:18.152615 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqs7x\" (UniqueName: \"kubernetes.io/projected/99cb33ba-ebb2-422f-974a-43d02bdca002-kube-api-access-hqs7x\") pod \"swift-ring-rebalance-debug-mlfn5\" (UID: \"99cb33ba-ebb2-422f-974a-43d02bdca002\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-mlfn5" Jan 26 11:03:18 crc kubenswrapper[5003]: I0126 11:03:18.152663 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/99cb33ba-ebb2-422f-974a-43d02bdca002-etc-swift\") pod \"swift-ring-rebalance-debug-mlfn5\" (UID: \"99cb33ba-ebb2-422f-974a-43d02bdca002\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-mlfn5" Jan 26 11:03:18 crc kubenswrapper[5003]: I0126 11:03:18.152693 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/99cb33ba-ebb2-422f-974a-43d02bdca002-scripts\") pod \"swift-ring-rebalance-debug-mlfn5\" (UID: \"99cb33ba-ebb2-422f-974a-43d02bdca002\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-mlfn5" Jan 26 11:03:18 crc kubenswrapper[5003]: I0126 11:03:18.152720 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/99cb33ba-ebb2-422f-974a-43d02bdca002-dispersionconf\") pod \"swift-ring-rebalance-debug-mlfn5\" (UID: \"99cb33ba-ebb2-422f-974a-43d02bdca002\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-mlfn5" Jan 26 11:03:18 crc kubenswrapper[5003]: I0126 11:03:18.152822 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/99cb33ba-ebb2-422f-974a-43d02bdca002-ring-data-devices\") pod \"swift-ring-rebalance-debug-mlfn5\" (UID: \"99cb33ba-ebb2-422f-974a-43d02bdca002\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-mlfn5" Jan 26 11:03:18 crc kubenswrapper[5003]: I0126 11:03:18.153254 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/99cb33ba-ebb2-422f-974a-43d02bdca002-etc-swift\") pod \"swift-ring-rebalance-debug-mlfn5\" (UID: \"99cb33ba-ebb2-422f-974a-43d02bdca002\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-mlfn5" Jan 26 11:03:18 crc kubenswrapper[5003]: I0126 11:03:18.153616 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/99cb33ba-ebb2-422f-974a-43d02bdca002-scripts\") pod \"swift-ring-rebalance-debug-mlfn5\" (UID: \"99cb33ba-ebb2-422f-974a-43d02bdca002\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-mlfn5" Jan 26 11:03:18 crc kubenswrapper[5003]: I0126 11:03:18.153906 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/99cb33ba-ebb2-422f-974a-43d02bdca002-ring-data-devices\") pod \"swift-ring-rebalance-debug-mlfn5\" (UID: \"99cb33ba-ebb2-422f-974a-43d02bdca002\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-mlfn5" Jan 26 11:03:18 crc kubenswrapper[5003]: I0126 11:03:18.163059 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/99cb33ba-ebb2-422f-974a-43d02bdca002-dispersionconf\") pod \"swift-ring-rebalance-debug-mlfn5\" (UID: \"99cb33ba-ebb2-422f-974a-43d02bdca002\") " 
pod="swift-kuttl-tests/swift-ring-rebalance-debug-mlfn5" Jan 26 11:03:18 crc kubenswrapper[5003]: I0126 11:03:18.164549 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/99cb33ba-ebb2-422f-974a-43d02bdca002-swiftconf\") pod \"swift-ring-rebalance-debug-mlfn5\" (UID: \"99cb33ba-ebb2-422f-974a-43d02bdca002\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-mlfn5" Jan 26 11:03:18 crc kubenswrapper[5003]: I0126 11:03:18.180654 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqs7x\" (UniqueName: \"kubernetes.io/projected/99cb33ba-ebb2-422f-974a-43d02bdca002-kube-api-access-hqs7x\") pod \"swift-ring-rebalance-debug-mlfn5\" (UID: \"99cb33ba-ebb2-422f-974a-43d02bdca002\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-mlfn5" Jan 26 11:03:18 crc kubenswrapper[5003]: I0126 11:03:18.277747 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-mlfn5" Jan 26 11:03:18 crc kubenswrapper[5003]: I0126 11:03:18.717517 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-mlfn5"] Jan 26 11:03:19 crc kubenswrapper[5003]: I0126 11:03:19.175392 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-mlfn5" event={"ID":"99cb33ba-ebb2-422f-974a-43d02bdca002","Type":"ContainerStarted","Data":"023f0eaec1dd53f44fd127cf2e768f7b234b5705d970e992c494e89878e8895b"} Jan 26 11:03:19 crc kubenswrapper[5003]: I0126 11:03:19.175826 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-mlfn5" event={"ID":"99cb33ba-ebb2-422f-974a-43d02bdca002","Type":"ContainerStarted","Data":"60cd9fd616fa5c1749287833386b1194dc609871d59d7162a9a51168548c3aa0"} Jan 26 11:03:19 crc kubenswrapper[5003]: I0126 11:03:19.202590 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-ring-rebalance-debug-mlfn5" podStartSLOduration=2.202562074 podStartE2EDuration="2.202562074s" podCreationTimestamp="2026-01-26 11:03:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 11:03:19.196001531 +0000 UTC m=+1214.737227122" watchObservedRunningTime="2026-01-26 11:03:19.202562074 +0000 UTC m=+1214.743787635" Jan 26 11:03:20 crc kubenswrapper[5003]: I0126 11:03:20.189192 5003 generic.go:334] "Generic (PLEG): container finished" podID="99cb33ba-ebb2-422f-974a-43d02bdca002" containerID="023f0eaec1dd53f44fd127cf2e768f7b234b5705d970e992c494e89878e8895b" exitCode=0 Jan 26 11:03:20 crc kubenswrapper[5003]: I0126 11:03:20.189316 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-mlfn5" event={"ID":"99cb33ba-ebb2-422f-974a-43d02bdca002","Type":"ContainerDied","Data":"023f0eaec1dd53f44fd127cf2e768f7b234b5705d970e992c494e89878e8895b"} Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.536889 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-mlfn5" Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.591613 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-mlfn5"] Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.599022 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-mlfn5"] Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.618984 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/99cb33ba-ebb2-422f-974a-43d02bdca002-ring-data-devices\") pod \"99cb33ba-ebb2-422f-974a-43d02bdca002\" (UID: \"99cb33ba-ebb2-422f-974a-43d02bdca002\") " Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.619121 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hqs7x\" (UniqueName: \"kubernetes.io/projected/99cb33ba-ebb2-422f-974a-43d02bdca002-kube-api-access-hqs7x\") pod \"99cb33ba-ebb2-422f-974a-43d02bdca002\" (UID: \"99cb33ba-ebb2-422f-974a-43d02bdca002\") " Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.619187 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/99cb33ba-ebb2-422f-974a-43d02bdca002-swiftconf\") pod \"99cb33ba-ebb2-422f-974a-43d02bdca002\" (UID: \"99cb33ba-ebb2-422f-974a-43d02bdca002\") " Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.619222 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/99cb33ba-ebb2-422f-974a-43d02bdca002-scripts\") pod \"99cb33ba-ebb2-422f-974a-43d02bdca002\" (UID: \"99cb33ba-ebb2-422f-974a-43d02bdca002\") " Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.619303 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/99cb33ba-ebb2-422f-974a-43d02bdca002-etc-swift\") pod \"99cb33ba-ebb2-422f-974a-43d02bdca002\" (UID: \"99cb33ba-ebb2-422f-974a-43d02bdca002\") " Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.619332 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/99cb33ba-ebb2-422f-974a-43d02bdca002-dispersionconf\") pod \"99cb33ba-ebb2-422f-974a-43d02bdca002\" (UID: \"99cb33ba-ebb2-422f-974a-43d02bdca002\") " Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.621463 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99cb33ba-ebb2-422f-974a-43d02bdca002-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "99cb33ba-ebb2-422f-974a-43d02bdca002" (UID: "99cb33ba-ebb2-422f-974a-43d02bdca002"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.622903 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99cb33ba-ebb2-422f-974a-43d02bdca002-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "99cb33ba-ebb2-422f-974a-43d02bdca002" (UID: "99cb33ba-ebb2-422f-974a-43d02bdca002"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.627118 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99cb33ba-ebb2-422f-974a-43d02bdca002-kube-api-access-hqs7x" (OuterVolumeSpecName: "kube-api-access-hqs7x") pod "99cb33ba-ebb2-422f-974a-43d02bdca002" (UID: "99cb33ba-ebb2-422f-974a-43d02bdca002"). InnerVolumeSpecName "kube-api-access-hqs7x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.667492 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99cb33ba-ebb2-422f-974a-43d02bdca002-scripts" (OuterVolumeSpecName: "scripts") pod "99cb33ba-ebb2-422f-974a-43d02bdca002" (UID: "99cb33ba-ebb2-422f-974a-43d02bdca002"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.670150 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99cb33ba-ebb2-422f-974a-43d02bdca002-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "99cb33ba-ebb2-422f-974a-43d02bdca002" (UID: "99cb33ba-ebb2-422f-974a-43d02bdca002"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.684169 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99cb33ba-ebb2-422f-974a-43d02bdca002-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "99cb33ba-ebb2-422f-974a-43d02bdca002" (UID: "99cb33ba-ebb2-422f-974a-43d02bdca002"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.716561 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.718610 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="account-server" containerID="cri-o://9fb4b2da57c09b06d963087a7b2d5350926dcd7da35b278befbe31ad552a754d" gracePeriod=30 Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.718667 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="swift-recon-cron" containerID="cri-o://1e66bacd66c1713cb8ddb28b000373a5af6a89908aff178280a3238996ef4c02" gracePeriod=30 Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.718809 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="rsync" containerID="cri-o://e207a9e19bd5fae5cf1e7b106b156be05674e02e02f4823d67a39c93dbd0b58f" gracePeriod=30 Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.718864 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="object-expirer" containerID="cri-o://5b7afa0645c36e901a5e91815cf8f17bbb22db46aceff23a3f3aa9556fb0c630" gracePeriod=30 Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.718917 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" 
podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="object-updater" containerID="cri-o://fdad5e8e97120339d10d7af6547b7674530431afef576d08a34a038300b82aa9" gracePeriod=30 Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.718962 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="object-auditor" containerID="cri-o://a7c3cb6d1fbe60a5bea5b9905a6d82ad2fbaaad5f9c9d0f66fd4e1983046fc2b" gracePeriod=30 Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.719007 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="object-replicator" containerID="cri-o://67eb5409482dd789330246caef477b0b42a7703be6d48d5458e12acc8a4f91da" gracePeriod=30 Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.719047 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="object-server" containerID="cri-o://509aef8b430af4383a56bfdb0a66e724ad5728388c4150824d200fe4b5f3e870" gracePeriod=30 Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.719096 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="container-updater" containerID="cri-o://f0622d6558a77138f1fdd4b2e3211e40e8eb3e66c3cea67d04d895f11a220da4" gracePeriod=30 Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.719156 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="container-auditor" containerID="cri-o://d9aecb65386c41feca3abf2a5fbe57a0f986fa234a670e526653809cf34c4cf8" gracePeriod=30 Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.719207 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="container-replicator" containerID="cri-o://420ff3f87b1107618aa2357f6d84d429f41c0f858c95fcda74efdc511fe0e7e6" gracePeriod=30 Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.719255 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="container-server" containerID="cri-o://a34d33467d6f6d242a3b707751dbe7354c20189edff14d262a5864925e808499" gracePeriod=30 Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.719303 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="container-sharder" containerID="cri-o://3cde556799c2921d1b5f4dd05c1e984d283e7ac4afefd6fa610f04937d39a084" gracePeriod=30 Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.719327 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="account-reaper" containerID="cri-o://160183d14ed2611dea2b016a758359c9df8c3dcf8342ded4c5f95f3ed1bf904e" gracePeriod=30 Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.719382 5003 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="swift-kuttl-tests/swift-storage-0" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="account-auditor" containerID="cri-o://c689e9c623cc0252896cf6beb81033e8123e65657279b75f6c34bce37d87bd85" gracePeriod=30 Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.719461 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="account-replicator" containerID="cri-o://1680262baddb5d668d5409f050342fd890de576a0c7625f25e3742c2835b5a76" gracePeriod=30 Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.734266 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hqs7x\" (UniqueName: \"kubernetes.io/projected/99cb33ba-ebb2-422f-974a-43d02bdca002-kube-api-access-hqs7x\") on node \"crc\" DevicePath \"\"" Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.734350 5003 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/99cb33ba-ebb2-422f-974a-43d02bdca002-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.734366 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/99cb33ba-ebb2-422f-974a-43d02bdca002-scripts\") on node \"crc\" DevicePath \"\"" Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.734379 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/99cb33ba-ebb2-422f-974a-43d02bdca002-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.734393 5003 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/99cb33ba-ebb2-422f-974a-43d02bdca002-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.734408 5003 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/99cb33ba-ebb2-422f-974a-43d02bdca002-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.759274 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-x6z4g"] Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.775506 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-x6z4g"] Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.783820 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv"] Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.784166 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv" podUID="9c1aa18c-b3aa-4ab3-a941-5a45632ee873" containerName="proxy-httpd" containerID="cri-o://7d24ccd5d3d9eddeaf1d9df0e4b819ad90c3bcc235e709d7616ae089e7eafad9" gracePeriod=30 Jan 26 11:03:21 crc kubenswrapper[5003]: I0126 11:03:21.784444 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv" podUID="9c1aa18c-b3aa-4ab3-a941-5a45632ee873" containerName="proxy-server" containerID="cri-o://fb6119ecedd35cef11c530cefdd43e105fc3f9289d0e7fd60bc2eecb9bd27a5f" gracePeriod=30 Jan 26 11:03:21 crc kubenswrapper[5003]: E0126 11:03:21.977426 5003 cadvisor_stats_provider.go:516] "Partial failure issuing 
cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb324618_6b4b_44b1_b223_8f4cd5680226.slice/crio-conmon-fdad5e8e97120339d10d7af6547b7674530431afef576d08a34a038300b82aa9.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb324618_6b4b_44b1_b223_8f4cd5680226.slice/crio-d9aecb65386c41feca3abf2a5fbe57a0f986fa234a670e526653809cf34c4cf8.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb324618_6b4b_44b1_b223_8f4cd5680226.slice/crio-1680262baddb5d668d5409f050342fd890de576a0c7625f25e3742c2835b5a76.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb324618_6b4b_44b1_b223_8f4cd5680226.slice/crio-f0622d6558a77138f1fdd4b2e3211e40e8eb3e66c3cea67d04d895f11a220da4.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb324618_6b4b_44b1_b223_8f4cd5680226.slice/crio-conmon-160183d14ed2611dea2b016a758359c9df8c3dcf8342ded4c5f95f3ed1bf904e.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb324618_6b4b_44b1_b223_8f4cd5680226.slice/crio-fdad5e8e97120339d10d7af6547b7674530431afef576d08a34a038300b82aa9.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb324618_6b4b_44b1_b223_8f4cd5680226.slice/crio-conmon-5b7afa0645c36e901a5e91815cf8f17bbb22db46aceff23a3f3aa9556fb0c630.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb324618_6b4b_44b1_b223_8f4cd5680226.slice/crio-a7c3cb6d1fbe60a5bea5b9905a6d82ad2fbaaad5f9c9d0f66fd4e1983046fc2b.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb324618_6b4b_44b1_b223_8f4cd5680226.slice/crio-conmon-c689e9c623cc0252896cf6beb81033e8123e65657279b75f6c34bce37d87bd85.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9c1aa18c_b3aa_4ab3_a941_5a45632ee873.slice/crio-conmon-7d24ccd5d3d9eddeaf1d9df0e4b819ad90c3bcc235e709d7616ae089e7eafad9.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb324618_6b4b_44b1_b223_8f4cd5680226.slice/crio-5b7afa0645c36e901a5e91815cf8f17bbb22db46aceff23a3f3aa9556fb0c630.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb324618_6b4b_44b1_b223_8f4cd5680226.slice/crio-conmon-3cde556799c2921d1b5f4dd05c1e984d283e7ac4afefd6fa610f04937d39a084.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb324618_6b4b_44b1_b223_8f4cd5680226.slice/crio-160183d14ed2611dea2b016a758359c9df8c3dcf8342ded4c5f95f3ed1bf904e.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb324618_6b4b_44b1_b223_8f4cd5680226.slice/crio-conmon-67eb5409482dd789330246caef477b0b42a7703be6d48d5458e12acc8a4f91da.scope\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb324618_6b4b_44b1_b223_8f4cd5680226.slice/crio-conmon-420ff3f87b1107618aa2357f6d84d429f41c0f858c95fcda74efdc511fe0e7e6.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb324618_6b4b_44b1_b223_8f4cd5680226.slice/crio-3cde556799c2921d1b5f4dd05c1e984d283e7ac4afefd6fa610f04937d39a084.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb324618_6b4b_44b1_b223_8f4cd5680226.slice/crio-67eb5409482dd789330246caef477b0b42a7703be6d48d5458e12acc8a4f91da.scope\": RecentStats: unable to find data in memory cache]" Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.232512 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="60cd9fd616fa5c1749287833386b1194dc609871d59d7162a9a51168548c3aa0" Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.232571 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-mlfn5" Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.236040 5003 generic.go:334] "Generic (PLEG): container finished" podID="9c1aa18c-b3aa-4ab3-a941-5a45632ee873" containerID="fb6119ecedd35cef11c530cefdd43e105fc3f9289d0e7fd60bc2eecb9bd27a5f" exitCode=0 Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.236072 5003 generic.go:334] "Generic (PLEG): container finished" podID="9c1aa18c-b3aa-4ab3-a941-5a45632ee873" containerID="7d24ccd5d3d9eddeaf1d9df0e4b819ad90c3bcc235e709d7616ae089e7eafad9" exitCode=0 Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.236123 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv" event={"ID":"9c1aa18c-b3aa-4ab3-a941-5a45632ee873","Type":"ContainerDied","Data":"fb6119ecedd35cef11c530cefdd43e105fc3f9289d0e7fd60bc2eecb9bd27a5f"} Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.236168 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv" event={"ID":"9c1aa18c-b3aa-4ab3-a941-5a45632ee873","Type":"ContainerDied","Data":"7d24ccd5d3d9eddeaf1d9df0e4b819ad90c3bcc235e709d7616ae089e7eafad9"} Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.242879 5003 generic.go:334] "Generic (PLEG): container finished" podID="db324618-6b4b-44b1-b223-8f4cd5680226" containerID="3cde556799c2921d1b5f4dd05c1e984d283e7ac4afefd6fa610f04937d39a084" exitCode=0 Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.242915 5003 generic.go:334] "Generic (PLEG): container finished" podID="db324618-6b4b-44b1-b223-8f4cd5680226" containerID="e207a9e19bd5fae5cf1e7b106b156be05674e02e02f4823d67a39c93dbd0b58f" exitCode=0 Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.242928 5003 generic.go:334] "Generic (PLEG): container finished" podID="db324618-6b4b-44b1-b223-8f4cd5680226" containerID="5b7afa0645c36e901a5e91815cf8f17bbb22db46aceff23a3f3aa9556fb0c630" exitCode=0 Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.242938 5003 generic.go:334] "Generic (PLEG): container finished" podID="db324618-6b4b-44b1-b223-8f4cd5680226" containerID="fdad5e8e97120339d10d7af6547b7674530431afef576d08a34a038300b82aa9" exitCode=0 Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.242945 5003 generic.go:334] "Generic (PLEG): container finished" podID="db324618-6b4b-44b1-b223-8f4cd5680226" 
containerID="a7c3cb6d1fbe60a5bea5b9905a6d82ad2fbaaad5f9c9d0f66fd4e1983046fc2b" exitCode=0 Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.242951 5003 generic.go:334] "Generic (PLEG): container finished" podID="db324618-6b4b-44b1-b223-8f4cd5680226" containerID="67eb5409482dd789330246caef477b0b42a7703be6d48d5458e12acc8a4f91da" exitCode=0 Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.242959 5003 generic.go:334] "Generic (PLEG): container finished" podID="db324618-6b4b-44b1-b223-8f4cd5680226" containerID="509aef8b430af4383a56bfdb0a66e724ad5728388c4150824d200fe4b5f3e870" exitCode=0 Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.242966 5003 generic.go:334] "Generic (PLEG): container finished" podID="db324618-6b4b-44b1-b223-8f4cd5680226" containerID="f0622d6558a77138f1fdd4b2e3211e40e8eb3e66c3cea67d04d895f11a220da4" exitCode=0 Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.242976 5003 generic.go:334] "Generic (PLEG): container finished" podID="db324618-6b4b-44b1-b223-8f4cd5680226" containerID="d9aecb65386c41feca3abf2a5fbe57a0f986fa234a670e526653809cf34c4cf8" exitCode=0 Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.242986 5003 generic.go:334] "Generic (PLEG): container finished" podID="db324618-6b4b-44b1-b223-8f4cd5680226" containerID="420ff3f87b1107618aa2357f6d84d429f41c0f858c95fcda74efdc511fe0e7e6" exitCode=0 Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.242996 5003 generic.go:334] "Generic (PLEG): container finished" podID="db324618-6b4b-44b1-b223-8f4cd5680226" containerID="a34d33467d6f6d242a3b707751dbe7354c20189edff14d262a5864925e808499" exitCode=0 Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.243007 5003 generic.go:334] "Generic (PLEG): container finished" podID="db324618-6b4b-44b1-b223-8f4cd5680226" containerID="160183d14ed2611dea2b016a758359c9df8c3dcf8342ded4c5f95f3ed1bf904e" exitCode=0 Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.243015 5003 generic.go:334] "Generic (PLEG): container finished" podID="db324618-6b4b-44b1-b223-8f4cd5680226" containerID="c689e9c623cc0252896cf6beb81033e8123e65657279b75f6c34bce37d87bd85" exitCode=0 Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.243024 5003 generic.go:334] "Generic (PLEG): container finished" podID="db324618-6b4b-44b1-b223-8f4cd5680226" containerID="1680262baddb5d668d5409f050342fd890de576a0c7625f25e3742c2835b5a76" exitCode=0 Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.243033 5003 generic.go:334] "Generic (PLEG): container finished" podID="db324618-6b4b-44b1-b223-8f4cd5680226" containerID="9fb4b2da57c09b06d963087a7b2d5350926dcd7da35b278befbe31ad552a754d" exitCode=0 Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.243061 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerDied","Data":"3cde556799c2921d1b5f4dd05c1e984d283e7ac4afefd6fa610f04937d39a084"} Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.243095 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerDied","Data":"e207a9e19bd5fae5cf1e7b106b156be05674e02e02f4823d67a39c93dbd0b58f"} Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.243110 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" 
event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerDied","Data":"5b7afa0645c36e901a5e91815cf8f17bbb22db46aceff23a3f3aa9556fb0c630"} Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.243122 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerDied","Data":"fdad5e8e97120339d10d7af6547b7674530431afef576d08a34a038300b82aa9"} Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.243135 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerDied","Data":"a7c3cb6d1fbe60a5bea5b9905a6d82ad2fbaaad5f9c9d0f66fd4e1983046fc2b"} Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.244411 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerDied","Data":"67eb5409482dd789330246caef477b0b42a7703be6d48d5458e12acc8a4f91da"} Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.244424 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerDied","Data":"509aef8b430af4383a56bfdb0a66e724ad5728388c4150824d200fe4b5f3e870"} Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.244437 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerDied","Data":"f0622d6558a77138f1fdd4b2e3211e40e8eb3e66c3cea67d04d895f11a220da4"} Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.244449 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerDied","Data":"d9aecb65386c41feca3abf2a5fbe57a0f986fa234a670e526653809cf34c4cf8"} Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.244460 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerDied","Data":"420ff3f87b1107618aa2357f6d84d429f41c0f858c95fcda74efdc511fe0e7e6"} Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.244475 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerDied","Data":"a34d33467d6f6d242a3b707751dbe7354c20189edff14d262a5864925e808499"} Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.244485 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerDied","Data":"160183d14ed2611dea2b016a758359c9df8c3dcf8342ded4c5f95f3ed1bf904e"} Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.244495 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerDied","Data":"c689e9c623cc0252896cf6beb81033e8123e65657279b75f6c34bce37d87bd85"} Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.244506 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerDied","Data":"1680262baddb5d668d5409f050342fd890de576a0c7625f25e3742c2835b5a76"} Jan 26 11:03:22 crc 
kubenswrapper[5003]: I0126 11:03:22.244516 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerDied","Data":"9fb4b2da57c09b06d963087a7b2d5350926dcd7da35b278befbe31ad552a754d"} Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.356007 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv" Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.447791 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9c1aa18c-b3aa-4ab3-a941-5a45632ee873-run-httpd\") pod \"9c1aa18c-b3aa-4ab3-a941-5a45632ee873\" (UID: \"9c1aa18c-b3aa-4ab3-a941-5a45632ee873\") " Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.447897 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sqzdm\" (UniqueName: \"kubernetes.io/projected/9c1aa18c-b3aa-4ab3-a941-5a45632ee873-kube-api-access-sqzdm\") pod \"9c1aa18c-b3aa-4ab3-a941-5a45632ee873\" (UID: \"9c1aa18c-b3aa-4ab3-a941-5a45632ee873\") " Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.448039 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c1aa18c-b3aa-4ab3-a941-5a45632ee873-config-data\") pod \"9c1aa18c-b3aa-4ab3-a941-5a45632ee873\" (UID: \"9c1aa18c-b3aa-4ab3-a941-5a45632ee873\") " Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.448098 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9c1aa18c-b3aa-4ab3-a941-5a45632ee873-log-httpd\") pod \"9c1aa18c-b3aa-4ab3-a941-5a45632ee873\" (UID: \"9c1aa18c-b3aa-4ab3-a941-5a45632ee873\") " Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.448157 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/9c1aa18c-b3aa-4ab3-a941-5a45632ee873-etc-swift\") pod \"9c1aa18c-b3aa-4ab3-a941-5a45632ee873\" (UID: \"9c1aa18c-b3aa-4ab3-a941-5a45632ee873\") " Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.448763 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c1aa18c-b3aa-4ab3-a941-5a45632ee873-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "9c1aa18c-b3aa-4ab3-a941-5a45632ee873" (UID: "9c1aa18c-b3aa-4ab3-a941-5a45632ee873"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.449744 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c1aa18c-b3aa-4ab3-a941-5a45632ee873-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "9c1aa18c-b3aa-4ab3-a941-5a45632ee873" (UID: "9c1aa18c-b3aa-4ab3-a941-5a45632ee873"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.456961 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c1aa18c-b3aa-4ab3-a941-5a45632ee873-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "9c1aa18c-b3aa-4ab3-a941-5a45632ee873" (UID: "9c1aa18c-b3aa-4ab3-a941-5a45632ee873"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.459878 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c1aa18c-b3aa-4ab3-a941-5a45632ee873-kube-api-access-sqzdm" (OuterVolumeSpecName: "kube-api-access-sqzdm") pod "9c1aa18c-b3aa-4ab3-a941-5a45632ee873" (UID: "9c1aa18c-b3aa-4ab3-a941-5a45632ee873"). InnerVolumeSpecName "kube-api-access-sqzdm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.493310 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c1aa18c-b3aa-4ab3-a941-5a45632ee873-config-data" (OuterVolumeSpecName: "config-data") pod "9c1aa18c-b3aa-4ab3-a941-5a45632ee873" (UID: "9c1aa18c-b3aa-4ab3-a941-5a45632ee873"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.550606 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c1aa18c-b3aa-4ab3-a941-5a45632ee873-config-data\") on node \"crc\" DevicePath \"\"" Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.550649 5003 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9c1aa18c-b3aa-4ab3-a941-5a45632ee873-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.550660 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/9c1aa18c-b3aa-4ab3-a941-5a45632ee873-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.550672 5003 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9c1aa18c-b3aa-4ab3-a941-5a45632ee873-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 26 11:03:22 crc kubenswrapper[5003]: I0126 11:03:22.550685 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sqzdm\" (UniqueName: \"kubernetes.io/projected/9c1aa18c-b3aa-4ab3-a941-5a45632ee873-kube-api-access-sqzdm\") on node \"crc\" DevicePath \"\"" Jan 26 11:03:23 crc kubenswrapper[5003]: I0126 11:03:23.016456 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e4c83f1-0d2b-4389-b5fd-d3497db222d2" path="/var/lib/kubelet/pods/2e4c83f1-0d2b-4389-b5fd-d3497db222d2/volumes" Jan 26 11:03:23 crc kubenswrapper[5003]: I0126 11:03:23.017214 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99cb33ba-ebb2-422f-974a-43d02bdca002" path="/var/lib/kubelet/pods/99cb33ba-ebb2-422f-974a-43d02bdca002/volumes" Jan 26 11:03:23 crc kubenswrapper[5003]: I0126 11:03:23.254733 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv" event={"ID":"9c1aa18c-b3aa-4ab3-a941-5a45632ee873","Type":"ContainerDied","Data":"ed80dfd860e47b7162b8a3c0d40e4189cfc6212f10878e4cfa85ba973d2f4d75"} Jan 26 11:03:23 crc kubenswrapper[5003]: I0126 11:03:23.254807 5003 scope.go:117] "RemoveContainer" containerID="fb6119ecedd35cef11c530cefdd43e105fc3f9289d0e7fd60bc2eecb9bd27a5f" Jan 26 11:03:23 crc kubenswrapper[5003]: I0126 11:03:23.254999 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv" Jan 26 11:03:23 crc kubenswrapper[5003]: I0126 11:03:23.278142 5003 scope.go:117] "RemoveContainer" containerID="7d24ccd5d3d9eddeaf1d9df0e4b819ad90c3bcc235e709d7616ae089e7eafad9" Jan 26 11:03:23 crc kubenswrapper[5003]: I0126 11:03:23.283292 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv"] Jan 26 11:03:23 crc kubenswrapper[5003]: I0126 11:03:23.292994 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-proxy-59d59699f5-g8jvv"] Jan 26 11:03:25 crc kubenswrapper[5003]: I0126 11:03:25.010154 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c1aa18c-b3aa-4ab3-a941-5a45632ee873" path="/var/lib/kubelet/pods/9c1aa18c-b3aa-4ab3-a941-5a45632ee873/volumes" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.241984 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.394083 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/db324618-6b4b-44b1-b223-8f4cd5680226-lock\") pod \"db324618-6b4b-44b1-b223-8f4cd5680226\" (UID: \"db324618-6b4b-44b1-b223-8f4cd5680226\") " Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.394149 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/db324618-6b4b-44b1-b223-8f4cd5680226-cache\") pod \"db324618-6b4b-44b1-b223-8f4cd5680226\" (UID: \"db324618-6b4b-44b1-b223-8f4cd5680226\") " Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.394219 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/db324618-6b4b-44b1-b223-8f4cd5680226-etc-swift\") pod \"db324618-6b4b-44b1-b223-8f4cd5680226\" (UID: \"db324618-6b4b-44b1-b223-8f4cd5680226\") " Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.394259 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tcmzj\" (UniqueName: \"kubernetes.io/projected/db324618-6b4b-44b1-b223-8f4cd5680226-kube-api-access-tcmzj\") pod \"db324618-6b4b-44b1-b223-8f4cd5680226\" (UID: \"db324618-6b4b-44b1-b223-8f4cd5680226\") " Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.394365 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"db324618-6b4b-44b1-b223-8f4cd5680226\" (UID: \"db324618-6b4b-44b1-b223-8f4cd5680226\") " Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.395445 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db324618-6b4b-44b1-b223-8f4cd5680226-cache" (OuterVolumeSpecName: "cache") pod "db324618-6b4b-44b1-b223-8f4cd5680226" (UID: "db324618-6b4b-44b1-b223-8f4cd5680226"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.395469 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db324618-6b4b-44b1-b223-8f4cd5680226-lock" (OuterVolumeSpecName: "lock") pod "db324618-6b4b-44b1-b223-8f4cd5680226" (UID: "db324618-6b4b-44b1-b223-8f4cd5680226"). InnerVolumeSpecName "lock". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.402130 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db324618-6b4b-44b1-b223-8f4cd5680226-kube-api-access-tcmzj" (OuterVolumeSpecName: "kube-api-access-tcmzj") pod "db324618-6b4b-44b1-b223-8f4cd5680226" (UID: "db324618-6b4b-44b1-b223-8f4cd5680226"). InnerVolumeSpecName "kube-api-access-tcmzj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.402343 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db324618-6b4b-44b1-b223-8f4cd5680226-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "db324618-6b4b-44b1-b223-8f4cd5680226" (UID: "db324618-6b4b-44b1-b223-8f4cd5680226"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.402885 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "swift") pod "db324618-6b4b-44b1-b223-8f4cd5680226" (UID: "db324618-6b4b-44b1-b223-8f4cd5680226"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.495940 5003 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/db324618-6b4b-44b1-b223-8f4cd5680226-lock\") on node \"crc\" DevicePath \"\"" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.496420 5003 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/db324618-6b4b-44b1-b223-8f4cd5680226-cache\") on node \"crc\" DevicePath \"\"" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.496465 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/db324618-6b4b-44b1-b223-8f4cd5680226-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.496478 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tcmzj\" (UniqueName: \"kubernetes.io/projected/db324618-6b4b-44b1-b223-8f4cd5680226-kube-api-access-tcmzj\") on node \"crc\" DevicePath \"\"" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.496525 5003 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.510031 5003 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.542607 5003 generic.go:334] "Generic (PLEG): container finished" podID="db324618-6b4b-44b1-b223-8f4cd5680226" containerID="1e66bacd66c1713cb8ddb28b000373a5af6a89908aff178280a3238996ef4c02" exitCode=137 Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.542684 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerDied","Data":"1e66bacd66c1713cb8ddb28b000373a5af6a89908aff178280a3238996ef4c02"} Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.542745 5003 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"db324618-6b4b-44b1-b223-8f4cd5680226","Type":"ContainerDied","Data":"1cf301e58efeec002ee983f45bddd5a566b3ce240df06d3e52721207a789a1c4"} Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.542766 5003 scope.go:117] "RemoveContainer" containerID="3cde556799c2921d1b5f4dd05c1e984d283e7ac4afefd6fa610f04937d39a084" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.542849 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.573663 5003 scope.go:117] "RemoveContainer" containerID="1e66bacd66c1713cb8ddb28b000373a5af6a89908aff178280a3238996ef4c02" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.599035 5003 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.603113 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.608734 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.614492 5003 scope.go:117] "RemoveContainer" containerID="e207a9e19bd5fae5cf1e7b106b156be05674e02e02f4823d67a39c93dbd0b58f" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.641465 5003 scope.go:117] "RemoveContainer" containerID="5b7afa0645c36e901a5e91815cf8f17bbb22db46aceff23a3f3aa9556fb0c630" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.661275 5003 scope.go:117] "RemoveContainer" containerID="fdad5e8e97120339d10d7af6547b7674530431afef576d08a34a038300b82aa9" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.684254 5003 scope.go:117] "RemoveContainer" containerID="a7c3cb6d1fbe60a5bea5b9905a6d82ad2fbaaad5f9c9d0f66fd4e1983046fc2b" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.709271 5003 scope.go:117] "RemoveContainer" containerID="67eb5409482dd789330246caef477b0b42a7703be6d48d5458e12acc8a4f91da" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.739790 5003 scope.go:117] "RemoveContainer" containerID="509aef8b430af4383a56bfdb0a66e724ad5728388c4150824d200fe4b5f3e870" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.762630 5003 scope.go:117] "RemoveContainer" containerID="f0622d6558a77138f1fdd4b2e3211e40e8eb3e66c3cea67d04d895f11a220da4" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.788868 5003 scope.go:117] "RemoveContainer" containerID="d9aecb65386c41feca3abf2a5fbe57a0f986fa234a670e526653809cf34c4cf8" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.808486 5003 scope.go:117] "RemoveContainer" containerID="420ff3f87b1107618aa2357f6d84d429f41c0f858c95fcda74efdc511fe0e7e6" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.830467 5003 scope.go:117] "RemoveContainer" containerID="a34d33467d6f6d242a3b707751dbe7354c20189edff14d262a5864925e808499" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.850565 5003 scope.go:117] "RemoveContainer" containerID="160183d14ed2611dea2b016a758359c9df8c3dcf8342ded4c5f95f3ed1bf904e" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.868927 5003 scope.go:117] "RemoveContainer" containerID="c689e9c623cc0252896cf6beb81033e8123e65657279b75f6c34bce37d87bd85" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.896330 5003 scope.go:117] "RemoveContainer" 
containerID="1680262baddb5d668d5409f050342fd890de576a0c7625f25e3742c2835b5a76" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.917155 5003 scope.go:117] "RemoveContainer" containerID="9fb4b2da57c09b06d963087a7b2d5350926dcd7da35b278befbe31ad552a754d" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.939164 5003 scope.go:117] "RemoveContainer" containerID="3cde556799c2921d1b5f4dd05c1e984d283e7ac4afefd6fa610f04937d39a084" Jan 26 11:03:52 crc kubenswrapper[5003]: E0126 11:03:52.940116 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3cde556799c2921d1b5f4dd05c1e984d283e7ac4afefd6fa610f04937d39a084\": container with ID starting with 3cde556799c2921d1b5f4dd05c1e984d283e7ac4afefd6fa610f04937d39a084 not found: ID does not exist" containerID="3cde556799c2921d1b5f4dd05c1e984d283e7ac4afefd6fa610f04937d39a084" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.940226 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3cde556799c2921d1b5f4dd05c1e984d283e7ac4afefd6fa610f04937d39a084"} err="failed to get container status \"3cde556799c2921d1b5f4dd05c1e984d283e7ac4afefd6fa610f04937d39a084\": rpc error: code = NotFound desc = could not find container \"3cde556799c2921d1b5f4dd05c1e984d283e7ac4afefd6fa610f04937d39a084\": container with ID starting with 3cde556799c2921d1b5f4dd05c1e984d283e7ac4afefd6fa610f04937d39a084 not found: ID does not exist" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.940338 5003 scope.go:117] "RemoveContainer" containerID="1e66bacd66c1713cb8ddb28b000373a5af6a89908aff178280a3238996ef4c02" Jan 26 11:03:52 crc kubenswrapper[5003]: E0126 11:03:52.941068 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e66bacd66c1713cb8ddb28b000373a5af6a89908aff178280a3238996ef4c02\": container with ID starting with 1e66bacd66c1713cb8ddb28b000373a5af6a89908aff178280a3238996ef4c02 not found: ID does not exist" containerID="1e66bacd66c1713cb8ddb28b000373a5af6a89908aff178280a3238996ef4c02" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.941122 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e66bacd66c1713cb8ddb28b000373a5af6a89908aff178280a3238996ef4c02"} err="failed to get container status \"1e66bacd66c1713cb8ddb28b000373a5af6a89908aff178280a3238996ef4c02\": rpc error: code = NotFound desc = could not find container \"1e66bacd66c1713cb8ddb28b000373a5af6a89908aff178280a3238996ef4c02\": container with ID starting with 1e66bacd66c1713cb8ddb28b000373a5af6a89908aff178280a3238996ef4c02 not found: ID does not exist" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.941155 5003 scope.go:117] "RemoveContainer" containerID="e207a9e19bd5fae5cf1e7b106b156be05674e02e02f4823d67a39c93dbd0b58f" Jan 26 11:03:52 crc kubenswrapper[5003]: E0126 11:03:52.941799 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e207a9e19bd5fae5cf1e7b106b156be05674e02e02f4823d67a39c93dbd0b58f\": container with ID starting with e207a9e19bd5fae5cf1e7b106b156be05674e02e02f4823d67a39c93dbd0b58f not found: ID does not exist" containerID="e207a9e19bd5fae5cf1e7b106b156be05674e02e02f4823d67a39c93dbd0b58f" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.941833 5003 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"e207a9e19bd5fae5cf1e7b106b156be05674e02e02f4823d67a39c93dbd0b58f"} err="failed to get container status \"e207a9e19bd5fae5cf1e7b106b156be05674e02e02f4823d67a39c93dbd0b58f\": rpc error: code = NotFound desc = could not find container \"e207a9e19bd5fae5cf1e7b106b156be05674e02e02f4823d67a39c93dbd0b58f\": container with ID starting with e207a9e19bd5fae5cf1e7b106b156be05674e02e02f4823d67a39c93dbd0b58f not found: ID does not exist" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.941858 5003 scope.go:117] "RemoveContainer" containerID="5b7afa0645c36e901a5e91815cf8f17bbb22db46aceff23a3f3aa9556fb0c630" Jan 26 11:03:52 crc kubenswrapper[5003]: E0126 11:03:52.942231 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b7afa0645c36e901a5e91815cf8f17bbb22db46aceff23a3f3aa9556fb0c630\": container with ID starting with 5b7afa0645c36e901a5e91815cf8f17bbb22db46aceff23a3f3aa9556fb0c630 not found: ID does not exist" containerID="5b7afa0645c36e901a5e91815cf8f17bbb22db46aceff23a3f3aa9556fb0c630" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.942259 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b7afa0645c36e901a5e91815cf8f17bbb22db46aceff23a3f3aa9556fb0c630"} err="failed to get container status \"5b7afa0645c36e901a5e91815cf8f17bbb22db46aceff23a3f3aa9556fb0c630\": rpc error: code = NotFound desc = could not find container \"5b7afa0645c36e901a5e91815cf8f17bbb22db46aceff23a3f3aa9556fb0c630\": container with ID starting with 5b7afa0645c36e901a5e91815cf8f17bbb22db46aceff23a3f3aa9556fb0c630 not found: ID does not exist" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.942282 5003 scope.go:117] "RemoveContainer" containerID="fdad5e8e97120339d10d7af6547b7674530431afef576d08a34a038300b82aa9" Jan 26 11:03:52 crc kubenswrapper[5003]: E0126 11:03:52.942690 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fdad5e8e97120339d10d7af6547b7674530431afef576d08a34a038300b82aa9\": container with ID starting with fdad5e8e97120339d10d7af6547b7674530431afef576d08a34a038300b82aa9 not found: ID does not exist" containerID="fdad5e8e97120339d10d7af6547b7674530431afef576d08a34a038300b82aa9" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.942750 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fdad5e8e97120339d10d7af6547b7674530431afef576d08a34a038300b82aa9"} err="failed to get container status \"fdad5e8e97120339d10d7af6547b7674530431afef576d08a34a038300b82aa9\": rpc error: code = NotFound desc = could not find container \"fdad5e8e97120339d10d7af6547b7674530431afef576d08a34a038300b82aa9\": container with ID starting with fdad5e8e97120339d10d7af6547b7674530431afef576d08a34a038300b82aa9 not found: ID does not exist" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.942771 5003 scope.go:117] "RemoveContainer" containerID="a7c3cb6d1fbe60a5bea5b9905a6d82ad2fbaaad5f9c9d0f66fd4e1983046fc2b" Jan 26 11:03:52 crc kubenswrapper[5003]: E0126 11:03:52.943831 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7c3cb6d1fbe60a5bea5b9905a6d82ad2fbaaad5f9c9d0f66fd4e1983046fc2b\": container with ID starting with a7c3cb6d1fbe60a5bea5b9905a6d82ad2fbaaad5f9c9d0f66fd4e1983046fc2b not found: ID does not exist" 
containerID="a7c3cb6d1fbe60a5bea5b9905a6d82ad2fbaaad5f9c9d0f66fd4e1983046fc2b" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.943862 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7c3cb6d1fbe60a5bea5b9905a6d82ad2fbaaad5f9c9d0f66fd4e1983046fc2b"} err="failed to get container status \"a7c3cb6d1fbe60a5bea5b9905a6d82ad2fbaaad5f9c9d0f66fd4e1983046fc2b\": rpc error: code = NotFound desc = could not find container \"a7c3cb6d1fbe60a5bea5b9905a6d82ad2fbaaad5f9c9d0f66fd4e1983046fc2b\": container with ID starting with a7c3cb6d1fbe60a5bea5b9905a6d82ad2fbaaad5f9c9d0f66fd4e1983046fc2b not found: ID does not exist" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.943883 5003 scope.go:117] "RemoveContainer" containerID="67eb5409482dd789330246caef477b0b42a7703be6d48d5458e12acc8a4f91da" Jan 26 11:03:52 crc kubenswrapper[5003]: E0126 11:03:52.944215 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67eb5409482dd789330246caef477b0b42a7703be6d48d5458e12acc8a4f91da\": container with ID starting with 67eb5409482dd789330246caef477b0b42a7703be6d48d5458e12acc8a4f91da not found: ID does not exist" containerID="67eb5409482dd789330246caef477b0b42a7703be6d48d5458e12acc8a4f91da" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.944247 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67eb5409482dd789330246caef477b0b42a7703be6d48d5458e12acc8a4f91da"} err="failed to get container status \"67eb5409482dd789330246caef477b0b42a7703be6d48d5458e12acc8a4f91da\": rpc error: code = NotFound desc = could not find container \"67eb5409482dd789330246caef477b0b42a7703be6d48d5458e12acc8a4f91da\": container with ID starting with 67eb5409482dd789330246caef477b0b42a7703be6d48d5458e12acc8a4f91da not found: ID does not exist" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.944271 5003 scope.go:117] "RemoveContainer" containerID="509aef8b430af4383a56bfdb0a66e724ad5728388c4150824d200fe4b5f3e870" Jan 26 11:03:52 crc kubenswrapper[5003]: E0126 11:03:52.944663 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"509aef8b430af4383a56bfdb0a66e724ad5728388c4150824d200fe4b5f3e870\": container with ID starting with 509aef8b430af4383a56bfdb0a66e724ad5728388c4150824d200fe4b5f3e870 not found: ID does not exist" containerID="509aef8b430af4383a56bfdb0a66e724ad5728388c4150824d200fe4b5f3e870" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.944694 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"509aef8b430af4383a56bfdb0a66e724ad5728388c4150824d200fe4b5f3e870"} err="failed to get container status \"509aef8b430af4383a56bfdb0a66e724ad5728388c4150824d200fe4b5f3e870\": rpc error: code = NotFound desc = could not find container \"509aef8b430af4383a56bfdb0a66e724ad5728388c4150824d200fe4b5f3e870\": container with ID starting with 509aef8b430af4383a56bfdb0a66e724ad5728388c4150824d200fe4b5f3e870 not found: ID does not exist" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.944714 5003 scope.go:117] "RemoveContainer" containerID="f0622d6558a77138f1fdd4b2e3211e40e8eb3e66c3cea67d04d895f11a220da4" Jan 26 11:03:52 crc kubenswrapper[5003]: E0126 11:03:52.945036 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"f0622d6558a77138f1fdd4b2e3211e40e8eb3e66c3cea67d04d895f11a220da4\": container with ID starting with f0622d6558a77138f1fdd4b2e3211e40e8eb3e66c3cea67d04d895f11a220da4 not found: ID does not exist" containerID="f0622d6558a77138f1fdd4b2e3211e40e8eb3e66c3cea67d04d895f11a220da4" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.945066 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0622d6558a77138f1fdd4b2e3211e40e8eb3e66c3cea67d04d895f11a220da4"} err="failed to get container status \"f0622d6558a77138f1fdd4b2e3211e40e8eb3e66c3cea67d04d895f11a220da4\": rpc error: code = NotFound desc = could not find container \"f0622d6558a77138f1fdd4b2e3211e40e8eb3e66c3cea67d04d895f11a220da4\": container with ID starting with f0622d6558a77138f1fdd4b2e3211e40e8eb3e66c3cea67d04d895f11a220da4 not found: ID does not exist" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.945089 5003 scope.go:117] "RemoveContainer" containerID="d9aecb65386c41feca3abf2a5fbe57a0f986fa234a670e526653809cf34c4cf8" Jan 26 11:03:52 crc kubenswrapper[5003]: E0126 11:03:52.945403 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9aecb65386c41feca3abf2a5fbe57a0f986fa234a670e526653809cf34c4cf8\": container with ID starting with d9aecb65386c41feca3abf2a5fbe57a0f986fa234a670e526653809cf34c4cf8 not found: ID does not exist" containerID="d9aecb65386c41feca3abf2a5fbe57a0f986fa234a670e526653809cf34c4cf8" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.945434 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9aecb65386c41feca3abf2a5fbe57a0f986fa234a670e526653809cf34c4cf8"} err="failed to get container status \"d9aecb65386c41feca3abf2a5fbe57a0f986fa234a670e526653809cf34c4cf8\": rpc error: code = NotFound desc = could not find container \"d9aecb65386c41feca3abf2a5fbe57a0f986fa234a670e526653809cf34c4cf8\": container with ID starting with d9aecb65386c41feca3abf2a5fbe57a0f986fa234a670e526653809cf34c4cf8 not found: ID does not exist" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.945456 5003 scope.go:117] "RemoveContainer" containerID="420ff3f87b1107618aa2357f6d84d429f41c0f858c95fcda74efdc511fe0e7e6" Jan 26 11:03:52 crc kubenswrapper[5003]: E0126 11:03:52.945798 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"420ff3f87b1107618aa2357f6d84d429f41c0f858c95fcda74efdc511fe0e7e6\": container with ID starting with 420ff3f87b1107618aa2357f6d84d429f41c0f858c95fcda74efdc511fe0e7e6 not found: ID does not exist" containerID="420ff3f87b1107618aa2357f6d84d429f41c0f858c95fcda74efdc511fe0e7e6" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.945869 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"420ff3f87b1107618aa2357f6d84d429f41c0f858c95fcda74efdc511fe0e7e6"} err="failed to get container status \"420ff3f87b1107618aa2357f6d84d429f41c0f858c95fcda74efdc511fe0e7e6\": rpc error: code = NotFound desc = could not find container \"420ff3f87b1107618aa2357f6d84d429f41c0f858c95fcda74efdc511fe0e7e6\": container with ID starting with 420ff3f87b1107618aa2357f6d84d429f41c0f858c95fcda74efdc511fe0e7e6 not found: ID does not exist" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.945893 5003 scope.go:117] "RemoveContainer" containerID="a34d33467d6f6d242a3b707751dbe7354c20189edff14d262a5864925e808499" Jan 26 11:03:52 crc 
kubenswrapper[5003]: E0126 11:03:52.946351 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a34d33467d6f6d242a3b707751dbe7354c20189edff14d262a5864925e808499\": container with ID starting with a34d33467d6f6d242a3b707751dbe7354c20189edff14d262a5864925e808499 not found: ID does not exist" containerID="a34d33467d6f6d242a3b707751dbe7354c20189edff14d262a5864925e808499" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.946419 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a34d33467d6f6d242a3b707751dbe7354c20189edff14d262a5864925e808499"} err="failed to get container status \"a34d33467d6f6d242a3b707751dbe7354c20189edff14d262a5864925e808499\": rpc error: code = NotFound desc = could not find container \"a34d33467d6f6d242a3b707751dbe7354c20189edff14d262a5864925e808499\": container with ID starting with a34d33467d6f6d242a3b707751dbe7354c20189edff14d262a5864925e808499 not found: ID does not exist" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.946442 5003 scope.go:117] "RemoveContainer" containerID="160183d14ed2611dea2b016a758359c9df8c3dcf8342ded4c5f95f3ed1bf904e" Jan 26 11:03:52 crc kubenswrapper[5003]: E0126 11:03:52.946909 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"160183d14ed2611dea2b016a758359c9df8c3dcf8342ded4c5f95f3ed1bf904e\": container with ID starting with 160183d14ed2611dea2b016a758359c9df8c3dcf8342ded4c5f95f3ed1bf904e not found: ID does not exist" containerID="160183d14ed2611dea2b016a758359c9df8c3dcf8342ded4c5f95f3ed1bf904e" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.946946 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"160183d14ed2611dea2b016a758359c9df8c3dcf8342ded4c5f95f3ed1bf904e"} err="failed to get container status \"160183d14ed2611dea2b016a758359c9df8c3dcf8342ded4c5f95f3ed1bf904e\": rpc error: code = NotFound desc = could not find container \"160183d14ed2611dea2b016a758359c9df8c3dcf8342ded4c5f95f3ed1bf904e\": container with ID starting with 160183d14ed2611dea2b016a758359c9df8c3dcf8342ded4c5f95f3ed1bf904e not found: ID does not exist" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.946967 5003 scope.go:117] "RemoveContainer" containerID="c689e9c623cc0252896cf6beb81033e8123e65657279b75f6c34bce37d87bd85" Jan 26 11:03:52 crc kubenswrapper[5003]: E0126 11:03:52.947496 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c689e9c623cc0252896cf6beb81033e8123e65657279b75f6c34bce37d87bd85\": container with ID starting with c689e9c623cc0252896cf6beb81033e8123e65657279b75f6c34bce37d87bd85 not found: ID does not exist" containerID="c689e9c623cc0252896cf6beb81033e8123e65657279b75f6c34bce37d87bd85" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.947520 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c689e9c623cc0252896cf6beb81033e8123e65657279b75f6c34bce37d87bd85"} err="failed to get container status \"c689e9c623cc0252896cf6beb81033e8123e65657279b75f6c34bce37d87bd85\": rpc error: code = NotFound desc = could not find container \"c689e9c623cc0252896cf6beb81033e8123e65657279b75f6c34bce37d87bd85\": container with ID starting with c689e9c623cc0252896cf6beb81033e8123e65657279b75f6c34bce37d87bd85 not found: ID does not exist" Jan 26 11:03:52 crc kubenswrapper[5003]: 
I0126 11:03:52.947533 5003 scope.go:117] "RemoveContainer" containerID="1680262baddb5d668d5409f050342fd890de576a0c7625f25e3742c2835b5a76" Jan 26 11:03:52 crc kubenswrapper[5003]: E0126 11:03:52.947917 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1680262baddb5d668d5409f050342fd890de576a0c7625f25e3742c2835b5a76\": container with ID starting with 1680262baddb5d668d5409f050342fd890de576a0c7625f25e3742c2835b5a76 not found: ID does not exist" containerID="1680262baddb5d668d5409f050342fd890de576a0c7625f25e3742c2835b5a76" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.947956 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1680262baddb5d668d5409f050342fd890de576a0c7625f25e3742c2835b5a76"} err="failed to get container status \"1680262baddb5d668d5409f050342fd890de576a0c7625f25e3742c2835b5a76\": rpc error: code = NotFound desc = could not find container \"1680262baddb5d668d5409f050342fd890de576a0c7625f25e3742c2835b5a76\": container with ID starting with 1680262baddb5d668d5409f050342fd890de576a0c7625f25e3742c2835b5a76 not found: ID does not exist" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.948002 5003 scope.go:117] "RemoveContainer" containerID="9fb4b2da57c09b06d963087a7b2d5350926dcd7da35b278befbe31ad552a754d" Jan 26 11:03:52 crc kubenswrapper[5003]: E0126 11:03:52.948434 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9fb4b2da57c09b06d963087a7b2d5350926dcd7da35b278befbe31ad552a754d\": container with ID starting with 9fb4b2da57c09b06d963087a7b2d5350926dcd7da35b278befbe31ad552a754d not found: ID does not exist" containerID="9fb4b2da57c09b06d963087a7b2d5350926dcd7da35b278befbe31ad552a754d" Jan 26 11:03:52 crc kubenswrapper[5003]: I0126 11:03:52.948464 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9fb4b2da57c09b06d963087a7b2d5350926dcd7da35b278befbe31ad552a754d"} err="failed to get container status \"9fb4b2da57c09b06d963087a7b2d5350926dcd7da35b278befbe31ad552a754d\": rpc error: code = NotFound desc = could not find container \"9fb4b2da57c09b06d963087a7b2d5350926dcd7da35b278befbe31ad552a754d\": container with ID starting with 9fb4b2da57c09b06d963087a7b2d5350926dcd7da35b278befbe31ad552a754d not found: ID does not exist" Jan 26 11:03:53 crc kubenswrapper[5003]: I0126 11:03:53.013418 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" path="/var/lib/kubelet/pods/db324618-6b4b-44b1-b223-8f4cd5680226/volumes" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.047328 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.048079 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="object-replicator" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048097 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="object-replicator" Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.048116 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="object-updater" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048124 5003 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="object-updater" Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.048134 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="object-auditor" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048140 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="object-auditor" Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.048152 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="container-replicator" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048160 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="container-replicator" Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.048171 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="container-server" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048179 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="container-server" Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.048190 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="account-reaper" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048196 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="account-reaper" Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.048207 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="container-auditor" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048214 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="container-auditor" Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.048222 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="object-expirer" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048228 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="object-expirer" Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.048236 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="account-replicator" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048242 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="account-replicator" Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.048249 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="account-auditor" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048255 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="account-auditor" Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.048271 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c1aa18c-b3aa-4ab3-a941-5a45632ee873" containerName="proxy-httpd" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048281 5003 
state_mem.go:107] "Deleted CPUSet assignment" podUID="9c1aa18c-b3aa-4ab3-a941-5a45632ee873" containerName="proxy-httpd" Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.048304 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99cb33ba-ebb2-422f-974a-43d02bdca002" containerName="swift-ring-rebalance" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048312 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="99cb33ba-ebb2-422f-974a-43d02bdca002" containerName="swift-ring-rebalance" Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.048324 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c1aa18c-b3aa-4ab3-a941-5a45632ee873" containerName="proxy-server" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048331 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c1aa18c-b3aa-4ab3-a941-5a45632ee873" containerName="proxy-server" Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.048366 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="object-server" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048372 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="object-server" Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.048380 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="account-server" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048387 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="account-server" Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.048398 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="container-sharder" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048407 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="container-sharder" Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.048420 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="container-updater" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048429 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="container-updater" Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.048437 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="rsync" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048443 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="rsync" Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.048453 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="swift-recon-cron" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048460 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="swift-recon-cron" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048590 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="container-auditor" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048602 
5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="rsync" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048611 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c1aa18c-b3aa-4ab3-a941-5a45632ee873" containerName="proxy-httpd" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048619 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="container-updater" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048629 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="object-expirer" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048636 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c1aa18c-b3aa-4ab3-a941-5a45632ee873" containerName="proxy-server" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048643 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="object-server" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048650 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="swift-recon-cron" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048659 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="account-auditor" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048666 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="object-replicator" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048673 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="99cb33ba-ebb2-422f-974a-43d02bdca002" containerName="swift-ring-rebalance" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048682 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="object-updater" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048691 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="container-sharder" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048698 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="account-replicator" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048706 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="container-replicator" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048712 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="account-server" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048719 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="container-server" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048725 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="object-auditor" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.048733 5003 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="db324618-6b4b-44b1-b223-8f4cd5680226" containerName="account-reaper" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.054334 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.057002 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-conf" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.057521 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-storage-config-data" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.058230 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-swift-dockercfg-dgrlq" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.058438 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-files" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.113873 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.133324 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-storage-1"] Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.141043 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-etc-swift\") pod \"swift-storage-0\" (UID: \"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.141210 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwjgs\" (UniqueName: \"kubernetes.io/projected/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-kube-api-access-jwjgs\") pod \"swift-storage-0\" (UID: \"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.141270 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-cache\") pod \"swift-storage-0\" (UID: \"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.141372 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.141430 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-lock\") pod \"swift-storage-0\" (UID: \"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.146932 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-storage-2"] Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.147236 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.176488 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-1"] Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.176561 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-2"] Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.176765 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.243169 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-etc-swift\") pod \"swift-storage-0\" (UID: \"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.243257 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f66de5f0-1dc9-497e-828e-563484b9f60e-etc-swift\") pod \"swift-storage-2\" (UID: \"f66de5f0-1dc9-497e-828e-563484b9f60e\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.243313 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/85e899d6-b28a-4b65-bbed-90648be93627-etc-swift\") pod \"swift-storage-1\" (UID: \"85e899d6-b28a-4b65-bbed-90648be93627\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.243348 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-2\" (UID: \"f66de5f0-1dc9-497e-828e-563484b9f60e\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.243394 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwjgs\" (UniqueName: \"kubernetes.io/projected/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-kube-api-access-jwjgs\") pod \"swift-storage-0\" (UID: \"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.243428 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-1\" (UID: \"85e899d6-b28a-4b65-bbed-90648be93627\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.243450 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/f66de5f0-1dc9-497e-828e-563484b9f60e-cache\") pod \"swift-storage-2\" (UID: \"f66de5f0-1dc9-497e-828e-563484b9f60e\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.243445 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.243479 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: 
\"kubernetes.io/empty-dir/f66de5f0-1dc9-497e-828e-563484b9f60e-lock\") pod \"swift-storage-2\" (UID: \"f66de5f0-1dc9-497e-828e-563484b9f60e\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.243519 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/85e899d6-b28a-4b65-bbed-90648be93627-cache\") pod \"swift-storage-1\" (UID: \"85e899d6-b28a-4b65-bbed-90648be93627\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.243554 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-cache\") pod \"swift-storage-0\" (UID: \"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.243487 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.243672 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-etc-swift podName:510b1209-97a2-4a4c-bc62-60d5c9ce6bcd nodeName:}" failed. No retries permitted until 2026-01-26 11:03:55.743638511 +0000 UTC m=+1251.284864272 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-etc-swift") pod "swift-storage-0" (UID: "510b1209-97a2-4a4c-bc62-60d5c9ce6bcd") : configmap "swift-ring-files" not found Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.243802 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.243924 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xms9z\" (UniqueName: \"kubernetes.io/projected/85e899d6-b28a-4b65-bbed-90648be93627-kube-api-access-xms9z\") pod \"swift-storage-1\" (UID: \"85e899d6-b28a-4b65-bbed-90648be93627\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.244049 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/85e899d6-b28a-4b65-bbed-90648be93627-lock\") pod \"swift-storage-1\" (UID: \"85e899d6-b28a-4b65-bbed-90648be93627\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.244126 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd\") device mount path \"/mnt/openstack/pv07\"" pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.244138 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2z52\" (UniqueName: \"kubernetes.io/projected/f66de5f0-1dc9-497e-828e-563484b9f60e-kube-api-access-d2z52\") 
pod \"swift-storage-2\" (UID: \"f66de5f0-1dc9-497e-828e-563484b9f60e\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.244180 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-lock\") pod \"swift-storage-0\" (UID: \"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.244246 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-cache\") pod \"swift-storage-0\" (UID: \"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.244949 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-lock\") pod \"swift-storage-0\" (UID: \"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.269242 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.272897 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwjgs\" (UniqueName: \"kubernetes.io/projected/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-kube-api-access-jwjgs\") pod \"swift-storage-0\" (UID: \"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.346624 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f66de5f0-1dc9-497e-828e-563484b9f60e-etc-swift\") pod \"swift-storage-2\" (UID: \"f66de5f0-1dc9-497e-828e-563484b9f60e\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.347240 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/85e899d6-b28a-4b65-bbed-90648be93627-etc-swift\") pod \"swift-storage-1\" (UID: \"85e899d6-b28a-4b65-bbed-90648be93627\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.347372 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-2\" (UID: \"f66de5f0-1dc9-497e-828e-563484b9f60e\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.347497 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-1\" (UID: \"85e899d6-b28a-4b65-bbed-90648be93627\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.347576 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-2\" (UID: 
\"f66de5f0-1dc9-497e-828e-563484b9f60e\") device mount path \"/mnt/openstack/pv06\"" pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.347115 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.347689 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-2: configmap "swift-ring-files" not found Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.347597 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/f66de5f0-1dc9-497e-828e-563484b9f60e-cache\") pod \"swift-storage-2\" (UID: \"f66de5f0-1dc9-497e-828e-563484b9f60e\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.347337 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.348039 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-1: configmap "swift-ring-files" not found Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.348050 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-1\" (UID: \"85e899d6-b28a-4b65-bbed-90648be93627\") device mount path \"/mnt/openstack/pv12\"" pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.348084 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f66de5f0-1dc9-497e-828e-563484b9f60e-etc-swift podName:f66de5f0-1dc9-497e-828e-563484b9f60e nodeName:}" failed. No retries permitted until 2026-01-26 11:03:55.847985671 +0000 UTC m=+1251.389211232 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/f66de5f0-1dc9-497e-828e-563484b9f60e-etc-swift") pod "swift-storage-2" (UID: "f66de5f0-1dc9-497e-828e-563484b9f60e") : configmap "swift-ring-files" not found Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.348102 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/85e899d6-b28a-4b65-bbed-90648be93627-etc-swift podName:85e899d6-b28a-4b65-bbed-90648be93627 nodeName:}" failed. No retries permitted until 2026-01-26 11:03:55.848095084 +0000 UTC m=+1251.389320645 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/85e899d6-b28a-4b65-bbed-90648be93627-etc-swift") pod "swift-storage-1" (UID: "85e899d6-b28a-4b65-bbed-90648be93627") : configmap "swift-ring-files" not found Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.348226 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/f66de5f0-1dc9-497e-828e-563484b9f60e-lock\") pod \"swift-storage-2\" (UID: \"f66de5f0-1dc9-497e-828e-563484b9f60e\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.348402 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/85e899d6-b28a-4b65-bbed-90648be93627-cache\") pod \"swift-storage-1\" (UID: \"85e899d6-b28a-4b65-bbed-90648be93627\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.348841 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/f66de5f0-1dc9-497e-828e-563484b9f60e-cache\") pod \"swift-storage-2\" (UID: \"f66de5f0-1dc9-497e-828e-563484b9f60e\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.348924 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/f66de5f0-1dc9-497e-828e-563484b9f60e-lock\") pod \"swift-storage-2\" (UID: \"f66de5f0-1dc9-497e-828e-563484b9f60e\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.349170 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/85e899d6-b28a-4b65-bbed-90648be93627-cache\") pod \"swift-storage-1\" (UID: \"85e899d6-b28a-4b65-bbed-90648be93627\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.349617 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xms9z\" (UniqueName: \"kubernetes.io/projected/85e899d6-b28a-4b65-bbed-90648be93627-kube-api-access-xms9z\") pod \"swift-storage-1\" (UID: \"85e899d6-b28a-4b65-bbed-90648be93627\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.350161 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/85e899d6-b28a-4b65-bbed-90648be93627-lock\") pod \"swift-storage-1\" (UID: \"85e899d6-b28a-4b65-bbed-90648be93627\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.350585 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/85e899d6-b28a-4b65-bbed-90648be93627-lock\") pod \"swift-storage-1\" (UID: \"85e899d6-b28a-4b65-bbed-90648be93627\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.350911 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2z52\" (UniqueName: \"kubernetes.io/projected/f66de5f0-1dc9-497e-828e-563484b9f60e-kube-api-access-d2z52\") pod \"swift-storage-2\" (UID: \"f66de5f0-1dc9-497e-828e-563484b9f60e\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.368327 5003 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-2\" (UID: \"f66de5f0-1dc9-497e-828e-563484b9f60e\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.370940 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xms9z\" (UniqueName: \"kubernetes.io/projected/85e899d6-b28a-4b65-bbed-90648be93627-kube-api-access-xms9z\") pod \"swift-storage-1\" (UID: \"85e899d6-b28a-4b65-bbed-90648be93627\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.372486 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2z52\" (UniqueName: \"kubernetes.io/projected/f66de5f0-1dc9-497e-828e-563484b9f60e-kube-api-access-d2z52\") pod \"swift-storage-2\" (UID: \"f66de5f0-1dc9-497e-828e-563484b9f60e\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.373629 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-1\" (UID: \"85e899d6-b28a-4b65-bbed-90648be93627\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.481095 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-bzq42"] Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.482730 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-bzq42" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.484717 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-config-data" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.486281 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-proxy-config-data" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.487394 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-scripts" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.499598 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-bzq42"] Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.555471 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/867792a2-0351-43c7-88c9-e28a5305d7de-swiftconf\") pod \"swift-ring-rebalance-bzq42\" (UID: \"867792a2-0351-43c7-88c9-e28a5305d7de\") " pod="swift-kuttl-tests/swift-ring-rebalance-bzq42" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.555597 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/867792a2-0351-43c7-88c9-e28a5305d7de-etc-swift\") pod \"swift-ring-rebalance-bzq42\" (UID: \"867792a2-0351-43c7-88c9-e28a5305d7de\") " pod="swift-kuttl-tests/swift-ring-rebalance-bzq42" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.555715 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/867792a2-0351-43c7-88c9-e28a5305d7de-ring-data-devices\") pod \"swift-ring-rebalance-bzq42\" (UID: 
\"867792a2-0351-43c7-88c9-e28a5305d7de\") " pod="swift-kuttl-tests/swift-ring-rebalance-bzq42" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.555762 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/867792a2-0351-43c7-88c9-e28a5305d7de-dispersionconf\") pod \"swift-ring-rebalance-bzq42\" (UID: \"867792a2-0351-43c7-88c9-e28a5305d7de\") " pod="swift-kuttl-tests/swift-ring-rebalance-bzq42" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.555809 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/867792a2-0351-43c7-88c9-e28a5305d7de-scripts\") pod \"swift-ring-rebalance-bzq42\" (UID: \"867792a2-0351-43c7-88c9-e28a5305d7de\") " pod="swift-kuttl-tests/swift-ring-rebalance-bzq42" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.555852 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mczs\" (UniqueName: \"kubernetes.io/projected/867792a2-0351-43c7-88c9-e28a5305d7de-kube-api-access-4mczs\") pod \"swift-ring-rebalance-bzq42\" (UID: \"867792a2-0351-43c7-88c9-e28a5305d7de\") " pod="swift-kuttl-tests/swift-ring-rebalance-bzq42" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.657022 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/867792a2-0351-43c7-88c9-e28a5305d7de-swiftconf\") pod \"swift-ring-rebalance-bzq42\" (UID: \"867792a2-0351-43c7-88c9-e28a5305d7de\") " pod="swift-kuttl-tests/swift-ring-rebalance-bzq42" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.657081 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/867792a2-0351-43c7-88c9-e28a5305d7de-etc-swift\") pod \"swift-ring-rebalance-bzq42\" (UID: \"867792a2-0351-43c7-88c9-e28a5305d7de\") " pod="swift-kuttl-tests/swift-ring-rebalance-bzq42" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.657134 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/867792a2-0351-43c7-88c9-e28a5305d7de-ring-data-devices\") pod \"swift-ring-rebalance-bzq42\" (UID: \"867792a2-0351-43c7-88c9-e28a5305d7de\") " pod="swift-kuttl-tests/swift-ring-rebalance-bzq42" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.657152 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/867792a2-0351-43c7-88c9-e28a5305d7de-dispersionconf\") pod \"swift-ring-rebalance-bzq42\" (UID: \"867792a2-0351-43c7-88c9-e28a5305d7de\") " pod="swift-kuttl-tests/swift-ring-rebalance-bzq42" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.657176 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/867792a2-0351-43c7-88c9-e28a5305d7de-scripts\") pod \"swift-ring-rebalance-bzq42\" (UID: \"867792a2-0351-43c7-88c9-e28a5305d7de\") " pod="swift-kuttl-tests/swift-ring-rebalance-bzq42" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.657201 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mczs\" (UniqueName: \"kubernetes.io/projected/867792a2-0351-43c7-88c9-e28a5305d7de-kube-api-access-4mczs\") pod 
\"swift-ring-rebalance-bzq42\" (UID: \"867792a2-0351-43c7-88c9-e28a5305d7de\") " pod="swift-kuttl-tests/swift-ring-rebalance-bzq42" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.657620 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/867792a2-0351-43c7-88c9-e28a5305d7de-etc-swift\") pod \"swift-ring-rebalance-bzq42\" (UID: \"867792a2-0351-43c7-88c9-e28a5305d7de\") " pod="swift-kuttl-tests/swift-ring-rebalance-bzq42" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.658438 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/867792a2-0351-43c7-88c9-e28a5305d7de-scripts\") pod \"swift-ring-rebalance-bzq42\" (UID: \"867792a2-0351-43c7-88c9-e28a5305d7de\") " pod="swift-kuttl-tests/swift-ring-rebalance-bzq42" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.658713 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/867792a2-0351-43c7-88c9-e28a5305d7de-ring-data-devices\") pod \"swift-ring-rebalance-bzq42\" (UID: \"867792a2-0351-43c7-88c9-e28a5305d7de\") " pod="swift-kuttl-tests/swift-ring-rebalance-bzq42" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.662854 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/867792a2-0351-43c7-88c9-e28a5305d7de-swiftconf\") pod \"swift-ring-rebalance-bzq42\" (UID: \"867792a2-0351-43c7-88c9-e28a5305d7de\") " pod="swift-kuttl-tests/swift-ring-rebalance-bzq42" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.662913 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/867792a2-0351-43c7-88c9-e28a5305d7de-dispersionconf\") pod \"swift-ring-rebalance-bzq42\" (UID: \"867792a2-0351-43c7-88c9-e28a5305d7de\") " pod="swift-kuttl-tests/swift-ring-rebalance-bzq42" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.680398 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mczs\" (UniqueName: \"kubernetes.io/projected/867792a2-0351-43c7-88c9-e28a5305d7de-kube-api-access-4mczs\") pod \"swift-ring-rebalance-bzq42\" (UID: \"867792a2-0351-43c7-88c9-e28a5305d7de\") " pod="swift-kuttl-tests/swift-ring-rebalance-bzq42" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.758907 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-etc-swift\") pod \"swift-storage-0\" (UID: \"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.759707 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.759765 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.759853 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-etc-swift podName:510b1209-97a2-4a4c-bc62-60d5c9ce6bcd nodeName:}" failed. 
No retries permitted until 2026-01-26 11:03:56.759825829 +0000 UTC m=+1252.301051390 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-etc-swift") pod "swift-storage-0" (UID: "510b1209-97a2-4a4c-bc62-60d5c9ce6bcd") : configmap "swift-ring-files" not found Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.806204 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-bzq42" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.860102 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/85e899d6-b28a-4b65-bbed-90648be93627-etc-swift\") pod \"swift-storage-1\" (UID: \"85e899d6-b28a-4b65-bbed-90648be93627\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:03:55 crc kubenswrapper[5003]: I0126 11:03:55.860185 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f66de5f0-1dc9-497e-828e-563484b9f60e-etc-swift\") pod \"swift-storage-2\" (UID: \"f66de5f0-1dc9-497e-828e-563484b9f60e\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.860377 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.860399 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-2: configmap "swift-ring-files" not found Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.860436 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.860475 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-1: configmap "swift-ring-files" not found Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.860451 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f66de5f0-1dc9-497e-828e-563484b9f60e-etc-swift podName:f66de5f0-1dc9-497e-828e-563484b9f60e nodeName:}" failed. No retries permitted until 2026-01-26 11:03:56.860435901 +0000 UTC m=+1252.401661462 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/f66de5f0-1dc9-497e-828e-563484b9f60e-etc-swift") pod "swift-storage-2" (UID: "f66de5f0-1dc9-497e-828e-563484b9f60e") : configmap "swift-ring-files" not found Jan 26 11:03:55 crc kubenswrapper[5003]: E0126 11:03:55.860608 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/85e899d6-b28a-4b65-bbed-90648be93627-etc-swift podName:85e899d6-b28a-4b65-bbed-90648be93627 nodeName:}" failed. No retries permitted until 2026-01-26 11:03:56.860578966 +0000 UTC m=+1252.401804527 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/85e899d6-b28a-4b65-bbed-90648be93627-etc-swift") pod "swift-storage-1" (UID: "85e899d6-b28a-4b65-bbed-90648be93627") : configmap "swift-ring-files" not found Jan 26 11:03:56 crc kubenswrapper[5003]: I0126 11:03:56.098077 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-bzq42"] Jan 26 11:03:56 crc kubenswrapper[5003]: W0126 11:03:56.111185 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod867792a2_0351_43c7_88c9_e28a5305d7de.slice/crio-9c7c6d9f0eda2e319be722ba17c559c3ddf09c203229581b6bb1e8bcc325857e WatchSource:0}: Error finding container 9c7c6d9f0eda2e319be722ba17c559c3ddf09c203229581b6bb1e8bcc325857e: Status 404 returned error can't find the container with id 9c7c6d9f0eda2e319be722ba17c559c3ddf09c203229581b6bb1e8bcc325857e Jan 26 11:03:56 crc kubenswrapper[5003]: I0126 11:03:56.368628 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5"] Jan 26 11:03:56 crc kubenswrapper[5003]: I0126 11:03:56.370120 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" Jan 26 11:03:56 crc kubenswrapper[5003]: I0126 11:03:56.385093 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5"] Jan 26 11:03:56 crc kubenswrapper[5003]: I0126 11:03:56.474537 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxt2x\" (UniqueName: \"kubernetes.io/projected/168289f3-76bd-4518-8672-b02c64df8a27-kube-api-access-hxt2x\") pod \"swift-proxy-67f6cc5479-w87g5\" (UID: \"168289f3-76bd-4518-8672-b02c64df8a27\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" Jan 26 11:03:56 crc kubenswrapper[5003]: I0126 11:03:56.474614 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/168289f3-76bd-4518-8672-b02c64df8a27-etc-swift\") pod \"swift-proxy-67f6cc5479-w87g5\" (UID: \"168289f3-76bd-4518-8672-b02c64df8a27\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" Jan 26 11:03:56 crc kubenswrapper[5003]: I0126 11:03:56.474638 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/168289f3-76bd-4518-8672-b02c64df8a27-log-httpd\") pod \"swift-proxy-67f6cc5479-w87g5\" (UID: \"168289f3-76bd-4518-8672-b02c64df8a27\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" Jan 26 11:03:56 crc kubenswrapper[5003]: I0126 11:03:56.474689 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/168289f3-76bd-4518-8672-b02c64df8a27-config-data\") pod \"swift-proxy-67f6cc5479-w87g5\" (UID: \"168289f3-76bd-4518-8672-b02c64df8a27\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" Jan 26 11:03:56 crc kubenswrapper[5003]: I0126 11:03:56.474814 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/168289f3-76bd-4518-8672-b02c64df8a27-run-httpd\") pod \"swift-proxy-67f6cc5479-w87g5\" (UID: \"168289f3-76bd-4518-8672-b02c64df8a27\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" Jan 
26 11:03:56 crc kubenswrapper[5003]: I0126 11:03:56.581263 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/168289f3-76bd-4518-8672-b02c64df8a27-run-httpd\") pod \"swift-proxy-67f6cc5479-w87g5\" (UID: \"168289f3-76bd-4518-8672-b02c64df8a27\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" Jan 26 11:03:56 crc kubenswrapper[5003]: I0126 11:03:56.581462 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxt2x\" (UniqueName: \"kubernetes.io/projected/168289f3-76bd-4518-8672-b02c64df8a27-kube-api-access-hxt2x\") pod \"swift-proxy-67f6cc5479-w87g5\" (UID: \"168289f3-76bd-4518-8672-b02c64df8a27\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" Jan 26 11:03:56 crc kubenswrapper[5003]: I0126 11:03:56.581504 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/168289f3-76bd-4518-8672-b02c64df8a27-etc-swift\") pod \"swift-proxy-67f6cc5479-w87g5\" (UID: \"168289f3-76bd-4518-8672-b02c64df8a27\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" Jan 26 11:03:56 crc kubenswrapper[5003]: I0126 11:03:56.581521 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/168289f3-76bd-4518-8672-b02c64df8a27-log-httpd\") pod \"swift-proxy-67f6cc5479-w87g5\" (UID: \"168289f3-76bd-4518-8672-b02c64df8a27\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" Jan 26 11:03:56 crc kubenswrapper[5003]: I0126 11:03:56.581564 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/168289f3-76bd-4518-8672-b02c64df8a27-config-data\") pod \"swift-proxy-67f6cc5479-w87g5\" (UID: \"168289f3-76bd-4518-8672-b02c64df8a27\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" Jan 26 11:03:56 crc kubenswrapper[5003]: E0126 11:03:56.581762 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:03:56 crc kubenswrapper[5003]: E0126 11:03:56.581805 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5: configmap "swift-ring-files" not found Jan 26 11:03:56 crc kubenswrapper[5003]: E0126 11:03:56.581905 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/168289f3-76bd-4518-8672-b02c64df8a27-etc-swift podName:168289f3-76bd-4518-8672-b02c64df8a27 nodeName:}" failed. No retries permitted until 2026-01-26 11:03:57.081874781 +0000 UTC m=+1252.623100342 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/168289f3-76bd-4518-8672-b02c64df8a27-etc-swift") pod "swift-proxy-67f6cc5479-w87g5" (UID: "168289f3-76bd-4518-8672-b02c64df8a27") : configmap "swift-ring-files" not found Jan 26 11:03:56 crc kubenswrapper[5003]: I0126 11:03:56.582142 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/168289f3-76bd-4518-8672-b02c64df8a27-run-httpd\") pod \"swift-proxy-67f6cc5479-w87g5\" (UID: \"168289f3-76bd-4518-8672-b02c64df8a27\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" Jan 26 11:03:56 crc kubenswrapper[5003]: I0126 11:03:56.582447 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/168289f3-76bd-4518-8672-b02c64df8a27-log-httpd\") pod \"swift-proxy-67f6cc5479-w87g5\" (UID: \"168289f3-76bd-4518-8672-b02c64df8a27\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" Jan 26 11:03:56 crc kubenswrapper[5003]: I0126 11:03:56.589064 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/168289f3-76bd-4518-8672-b02c64df8a27-config-data\") pod \"swift-proxy-67f6cc5479-w87g5\" (UID: \"168289f3-76bd-4518-8672-b02c64df8a27\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" Jan 26 11:03:56 crc kubenswrapper[5003]: I0126 11:03:56.595194 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-bzq42" event={"ID":"867792a2-0351-43c7-88c9-e28a5305d7de","Type":"ContainerStarted","Data":"6447e5e50432c5444f94d13c04a931686ff50ffeb66e6656939310c7f6336729"} Jan 26 11:03:56 crc kubenswrapper[5003]: I0126 11:03:56.595320 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-bzq42" event={"ID":"867792a2-0351-43c7-88c9-e28a5305d7de","Type":"ContainerStarted","Data":"9c7c6d9f0eda2e319be722ba17c559c3ddf09c203229581b6bb1e8bcc325857e"} Jan 26 11:03:56 crc kubenswrapper[5003]: I0126 11:03:56.619372 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxt2x\" (UniqueName: \"kubernetes.io/projected/168289f3-76bd-4518-8672-b02c64df8a27-kube-api-access-hxt2x\") pod \"swift-proxy-67f6cc5479-w87g5\" (UID: \"168289f3-76bd-4518-8672-b02c64df8a27\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" Jan 26 11:03:56 crc kubenswrapper[5003]: I0126 11:03:56.620693 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-ring-rebalance-bzq42" podStartSLOduration=1.6206781129999999 podStartE2EDuration="1.620678113s" podCreationTimestamp="2026-01-26 11:03:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 11:03:56.612415786 +0000 UTC m=+1252.153641357" watchObservedRunningTime="2026-01-26 11:03:56.620678113 +0000 UTC m=+1252.161903674" Jan 26 11:03:56 crc kubenswrapper[5003]: I0126 11:03:56.785368 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-etc-swift\") pod \"swift-storage-0\" (UID: \"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:03:56 crc kubenswrapper[5003]: E0126 11:03:56.785625 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap 
"swift-ring-files" not found Jan 26 11:03:56 crc kubenswrapper[5003]: E0126 11:03:56.785961 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 26 11:03:56 crc kubenswrapper[5003]: E0126 11:03:56.786044 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-etc-swift podName:510b1209-97a2-4a4c-bc62-60d5c9ce6bcd nodeName:}" failed. No retries permitted until 2026-01-26 11:03:58.78602293 +0000 UTC m=+1254.327248491 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-etc-swift") pod "swift-storage-0" (UID: "510b1209-97a2-4a4c-bc62-60d5c9ce6bcd") : configmap "swift-ring-files" not found Jan 26 11:03:56 crc kubenswrapper[5003]: I0126 11:03:56.887245 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f66de5f0-1dc9-497e-828e-563484b9f60e-etc-swift\") pod \"swift-storage-2\" (UID: \"f66de5f0-1dc9-497e-828e-563484b9f60e\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:03:56 crc kubenswrapper[5003]: I0126 11:03:56.887350 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/85e899d6-b28a-4b65-bbed-90648be93627-etc-swift\") pod \"swift-storage-1\" (UID: \"85e899d6-b28a-4b65-bbed-90648be93627\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:03:56 crc kubenswrapper[5003]: E0126 11:03:56.887574 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:03:56 crc kubenswrapper[5003]: E0126 11:03:56.887596 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-1: configmap "swift-ring-files" not found Jan 26 11:03:56 crc kubenswrapper[5003]: E0126 11:03:56.887670 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/85e899d6-b28a-4b65-bbed-90648be93627-etc-swift podName:85e899d6-b28a-4b65-bbed-90648be93627 nodeName:}" failed. No retries permitted until 2026-01-26 11:03:58.887651661 +0000 UTC m=+1254.428877222 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/85e899d6-b28a-4b65-bbed-90648be93627-etc-swift") pod "swift-storage-1" (UID: "85e899d6-b28a-4b65-bbed-90648be93627") : configmap "swift-ring-files" not found Jan 26 11:03:56 crc kubenswrapper[5003]: E0126 11:03:56.887727 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:03:56 crc kubenswrapper[5003]: E0126 11:03:56.887782 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-2: configmap "swift-ring-files" not found Jan 26 11:03:56 crc kubenswrapper[5003]: E0126 11:03:56.888066 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f66de5f0-1dc9-497e-828e-563484b9f60e-etc-swift podName:f66de5f0-1dc9-497e-828e-563484b9f60e nodeName:}" failed. No retries permitted until 2026-01-26 11:03:58.887849377 +0000 UTC m=+1254.429074938 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/f66de5f0-1dc9-497e-828e-563484b9f60e-etc-swift") pod "swift-storage-2" (UID: "f66de5f0-1dc9-497e-828e-563484b9f60e") : configmap "swift-ring-files" not found Jan 26 11:03:57 crc kubenswrapper[5003]: I0126 11:03:57.090681 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/168289f3-76bd-4518-8672-b02c64df8a27-etc-swift\") pod \"swift-proxy-67f6cc5479-w87g5\" (UID: \"168289f3-76bd-4518-8672-b02c64df8a27\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" Jan 26 11:03:57 crc kubenswrapper[5003]: E0126 11:03:57.090894 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:03:57 crc kubenswrapper[5003]: E0126 11:03:57.090961 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5: configmap "swift-ring-files" not found Jan 26 11:03:57 crc kubenswrapper[5003]: E0126 11:03:57.091044 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/168289f3-76bd-4518-8672-b02c64df8a27-etc-swift podName:168289f3-76bd-4518-8672-b02c64df8a27 nodeName:}" failed. No retries permitted until 2026-01-26 11:03:58.091019858 +0000 UTC m=+1253.632245409 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/168289f3-76bd-4518-8672-b02c64df8a27-etc-swift") pod "swift-proxy-67f6cc5479-w87g5" (UID: "168289f3-76bd-4518-8672-b02c64df8a27") : configmap "swift-ring-files" not found Jan 26 11:03:58 crc kubenswrapper[5003]: I0126 11:03:58.112696 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/168289f3-76bd-4518-8672-b02c64df8a27-etc-swift\") pod \"swift-proxy-67f6cc5479-w87g5\" (UID: \"168289f3-76bd-4518-8672-b02c64df8a27\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" Jan 26 11:03:58 crc kubenswrapper[5003]: E0126 11:03:58.112831 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:03:58 crc kubenswrapper[5003]: E0126 11:03:58.112866 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5: configmap "swift-ring-files" not found Jan 26 11:03:58 crc kubenswrapper[5003]: E0126 11:03:58.112929 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/168289f3-76bd-4518-8672-b02c64df8a27-etc-swift podName:168289f3-76bd-4518-8672-b02c64df8a27 nodeName:}" failed. No retries permitted until 2026-01-26 11:04:00.112904465 +0000 UTC m=+1255.654130026 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/168289f3-76bd-4518-8672-b02c64df8a27-etc-swift") pod "swift-proxy-67f6cc5479-w87g5" (UID: "168289f3-76bd-4518-8672-b02c64df8a27") : configmap "swift-ring-files" not found Jan 26 11:03:58 crc kubenswrapper[5003]: I0126 11:03:58.825237 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-etc-swift\") pod \"swift-storage-0\" (UID: \"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:03:58 crc kubenswrapper[5003]: E0126 11:03:58.825520 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:03:58 crc kubenswrapper[5003]: E0126 11:03:58.825722 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 26 11:03:58 crc kubenswrapper[5003]: E0126 11:03:58.825794 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-etc-swift podName:510b1209-97a2-4a4c-bc62-60d5c9ce6bcd nodeName:}" failed. No retries permitted until 2026-01-26 11:04:02.825769729 +0000 UTC m=+1258.366995290 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-etc-swift") pod "swift-storage-0" (UID: "510b1209-97a2-4a4c-bc62-60d5c9ce6bcd") : configmap "swift-ring-files" not found Jan 26 11:03:58 crc kubenswrapper[5003]: I0126 11:03:58.927703 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/85e899d6-b28a-4b65-bbed-90648be93627-etc-swift\") pod \"swift-storage-1\" (UID: \"85e899d6-b28a-4b65-bbed-90648be93627\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:03:58 crc kubenswrapper[5003]: I0126 11:03:58.927752 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f66de5f0-1dc9-497e-828e-563484b9f60e-etc-swift\") pod \"swift-storage-2\" (UID: \"f66de5f0-1dc9-497e-828e-563484b9f60e\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:03:58 crc kubenswrapper[5003]: E0126 11:03:58.927910 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:03:58 crc kubenswrapper[5003]: E0126 11:03:58.927927 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-2: configmap "swift-ring-files" not found Jan 26 11:03:58 crc kubenswrapper[5003]: E0126 11:03:58.927990 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f66de5f0-1dc9-497e-828e-563484b9f60e-etc-swift podName:f66de5f0-1dc9-497e-828e-563484b9f60e nodeName:}" failed. No retries permitted until 2026-01-26 11:04:02.927971327 +0000 UTC m=+1258.469196888 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/f66de5f0-1dc9-497e-828e-563484b9f60e-etc-swift") pod "swift-storage-2" (UID: "f66de5f0-1dc9-497e-828e-563484b9f60e") : configmap "swift-ring-files" not found Jan 26 11:03:58 crc kubenswrapper[5003]: E0126 11:03:58.927991 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:03:58 crc kubenswrapper[5003]: E0126 11:03:58.928042 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-1: configmap "swift-ring-files" not found Jan 26 11:03:58 crc kubenswrapper[5003]: E0126 11:03:58.928121 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/85e899d6-b28a-4b65-bbed-90648be93627-etc-swift podName:85e899d6-b28a-4b65-bbed-90648be93627 nodeName:}" failed. No retries permitted until 2026-01-26 11:04:02.928097391 +0000 UTC m=+1258.469322952 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/85e899d6-b28a-4b65-bbed-90648be93627-etc-swift") pod "swift-storage-1" (UID: "85e899d6-b28a-4b65-bbed-90648be93627") : configmap "swift-ring-files" not found Jan 26 11:04:00 crc kubenswrapper[5003]: I0126 11:04:00.149382 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/168289f3-76bd-4518-8672-b02c64df8a27-etc-swift\") pod \"swift-proxy-67f6cc5479-w87g5\" (UID: \"168289f3-76bd-4518-8672-b02c64df8a27\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" Jan 26 11:04:00 crc kubenswrapper[5003]: E0126 11:04:00.149600 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:04:00 crc kubenswrapper[5003]: E0126 11:04:00.149662 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5: configmap "swift-ring-files" not found Jan 26 11:04:00 crc kubenswrapper[5003]: E0126 11:04:00.149759 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/168289f3-76bd-4518-8672-b02c64df8a27-etc-swift podName:168289f3-76bd-4518-8672-b02c64df8a27 nodeName:}" failed. No retries permitted until 2026-01-26 11:04:04.149729109 +0000 UTC m=+1259.690954720 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/168289f3-76bd-4518-8672-b02c64df8a27-etc-swift") pod "swift-proxy-67f6cc5479-w87g5" (UID: "168289f3-76bd-4518-8672-b02c64df8a27") : configmap "swift-ring-files" not found Jan 26 11:04:02 crc kubenswrapper[5003]: I0126 11:04:02.902217 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-etc-swift\") pod \"swift-storage-0\" (UID: \"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:04:02 crc kubenswrapper[5003]: E0126 11:04:02.902473 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:04:02 crc kubenswrapper[5003]: E0126 11:04:02.902697 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 26 11:04:02 crc kubenswrapper[5003]: E0126 11:04:02.902791 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-etc-swift podName:510b1209-97a2-4a4c-bc62-60d5c9ce6bcd nodeName:}" failed. No retries permitted until 2026-01-26 11:04:10.902772444 +0000 UTC m=+1266.443998005 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-etc-swift") pod "swift-storage-0" (UID: "510b1209-97a2-4a4c-bc62-60d5c9ce6bcd") : configmap "swift-ring-files" not found Jan 26 11:04:03 crc kubenswrapper[5003]: I0126 11:04:03.003863 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f66de5f0-1dc9-497e-828e-563484b9f60e-etc-swift\") pod \"swift-storage-2\" (UID: \"f66de5f0-1dc9-497e-828e-563484b9f60e\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:04:03 crc kubenswrapper[5003]: E0126 11:04:03.004137 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:04:03 crc kubenswrapper[5003]: E0126 11:04:03.004198 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-2: configmap "swift-ring-files" not found Jan 26 11:04:03 crc kubenswrapper[5003]: I0126 11:04:03.004167 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/85e899d6-b28a-4b65-bbed-90648be93627-etc-swift\") pod \"swift-storage-1\" (UID: \"85e899d6-b28a-4b65-bbed-90648be93627\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:04:03 crc kubenswrapper[5003]: E0126 11:04:03.004272 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f66de5f0-1dc9-497e-828e-563484b9f60e-etc-swift podName:f66de5f0-1dc9-497e-828e-563484b9f60e nodeName:}" failed. No retries permitted until 2026-01-26 11:04:11.004243601 +0000 UTC m=+1266.545469172 (durationBeforeRetry 8s). 
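Note the durationBeforeRetry progression across these records: 500ms, then 1s, 2s, 4s, and now 8s. Kubelet's per-volume operation backoff doubles on each consecutive failure of the same operation. A toy Go sketch of that doubling; the 500ms starting delay matches the log, while the cap is an assumption for illustration, not a kubelet constant:

package main

import (
	"fmt"
	"time"
)

// nextDelay doubles the previous retry delay, reproducing the sequence seen
// in the durationBeforeRetry fields above: 500ms, 1s, 2s, 4s, 8s, ...
// maxDelay is an assumed illustrative cap, not taken from kubelet source.
func nextDelay(prev time.Duration) time.Duration {
	const initial = 500 * time.Millisecond
	const maxDelay = 2 * time.Minute
	if prev == 0 {
		return initial
	}
	if d := 2 * prev; d < maxDelay {
		return d
	}
	return maxDelay
}

func main() {
	var d time.Duration
	for i := 0; i < 5; i++ {
		d = nextDelay(d)
		fmt.Println(d) // 500ms 1s 2s 4s 8s
	}
}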
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/f66de5f0-1dc9-497e-828e-563484b9f60e-etc-swift") pod "swift-storage-2" (UID: "f66de5f0-1dc9-497e-828e-563484b9f60e") : configmap "swift-ring-files" not found Jan 26 11:04:03 crc kubenswrapper[5003]: E0126 11:04:03.004539 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:04:03 crc kubenswrapper[5003]: E0126 11:04:03.004676 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-1: configmap "swift-ring-files" not found Jan 26 11:04:03 crc kubenswrapper[5003]: E0126 11:04:03.004794 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/85e899d6-b28a-4b65-bbed-90648be93627-etc-swift podName:85e899d6-b28a-4b65-bbed-90648be93627 nodeName:}" failed. No retries permitted until 2026-01-26 11:04:11.004767196 +0000 UTC m=+1266.545992817 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/85e899d6-b28a-4b65-bbed-90648be93627-etc-swift") pod "swift-storage-1" (UID: "85e899d6-b28a-4b65-bbed-90648be93627") : configmap "swift-ring-files" not found Jan 26 11:04:04 crc kubenswrapper[5003]: I0126 11:04:04.226134 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/168289f3-76bd-4518-8672-b02c64df8a27-etc-swift\") pod \"swift-proxy-67f6cc5479-w87g5\" (UID: \"168289f3-76bd-4518-8672-b02c64df8a27\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" Jan 26 11:04:04 crc kubenswrapper[5003]: E0126 11:04:04.226399 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:04:04 crc kubenswrapper[5003]: E0126 11:04:04.226577 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5: configmap "swift-ring-files" not found Jan 26 11:04:04 crc kubenswrapper[5003]: E0126 11:04:04.226639 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/168289f3-76bd-4518-8672-b02c64df8a27-etc-swift podName:168289f3-76bd-4518-8672-b02c64df8a27 nodeName:}" failed. No retries permitted until 2026-01-26 11:04:12.226618872 +0000 UTC m=+1267.767844423 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/168289f3-76bd-4518-8672-b02c64df8a27-etc-swift") pod "swift-proxy-67f6cc5479-w87g5" (UID: "168289f3-76bd-4518-8672-b02c64df8a27") : configmap "swift-ring-files" not found Jan 26 11:04:07 crc kubenswrapper[5003]: I0126 11:04:07.699945 5003 generic.go:334] "Generic (PLEG): container finished" podID="867792a2-0351-43c7-88c9-e28a5305d7de" containerID="6447e5e50432c5444f94d13c04a931686ff50ffeb66e6656939310c7f6336729" exitCode=0 Jan 26 11:04:07 crc kubenswrapper[5003]: I0126 11:04:07.700228 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-bzq42" event={"ID":"867792a2-0351-43c7-88c9-e28a5305d7de","Type":"ContainerDied","Data":"6447e5e50432c5444f94d13c04a931686ff50ffeb66e6656939310c7f6336729"} Jan 26 11:04:09 crc kubenswrapper[5003]: I0126 11:04:09.018251 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-bzq42" Jan 26 11:04:09 crc kubenswrapper[5003]: I0126 11:04:09.104401 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/867792a2-0351-43c7-88c9-e28a5305d7de-etc-swift\") pod \"867792a2-0351-43c7-88c9-e28a5305d7de\" (UID: \"867792a2-0351-43c7-88c9-e28a5305d7de\") " Jan 26 11:04:09 crc kubenswrapper[5003]: I0126 11:04:09.104543 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/867792a2-0351-43c7-88c9-e28a5305d7de-swiftconf\") pod \"867792a2-0351-43c7-88c9-e28a5305d7de\" (UID: \"867792a2-0351-43c7-88c9-e28a5305d7de\") " Jan 26 11:04:09 crc kubenswrapper[5003]: I0126 11:04:09.104741 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/867792a2-0351-43c7-88c9-e28a5305d7de-scripts\") pod \"867792a2-0351-43c7-88c9-e28a5305d7de\" (UID: \"867792a2-0351-43c7-88c9-e28a5305d7de\") " Jan 26 11:04:09 crc kubenswrapper[5003]: I0126 11:04:09.104785 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4mczs\" (UniqueName: \"kubernetes.io/projected/867792a2-0351-43c7-88c9-e28a5305d7de-kube-api-access-4mczs\") pod \"867792a2-0351-43c7-88c9-e28a5305d7de\" (UID: \"867792a2-0351-43c7-88c9-e28a5305d7de\") " Jan 26 11:04:09 crc kubenswrapper[5003]: I0126 11:04:09.104858 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/867792a2-0351-43c7-88c9-e28a5305d7de-dispersionconf\") pod \"867792a2-0351-43c7-88c9-e28a5305d7de\" (UID: \"867792a2-0351-43c7-88c9-e28a5305d7de\") " Jan 26 11:04:09 crc kubenswrapper[5003]: I0126 11:04:09.104935 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/867792a2-0351-43c7-88c9-e28a5305d7de-ring-data-devices\") pod \"867792a2-0351-43c7-88c9-e28a5305d7de\" (UID: \"867792a2-0351-43c7-88c9-e28a5305d7de\") " Jan 26 11:04:09 crc kubenswrapper[5003]: I0126 11:04:09.105718 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/867792a2-0351-43c7-88c9-e28a5305d7de-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "867792a2-0351-43c7-88c9-e28a5305d7de" (UID: "867792a2-0351-43c7-88c9-e28a5305d7de"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 11:04:09 crc kubenswrapper[5003]: I0126 11:04:09.105799 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/867792a2-0351-43c7-88c9-e28a5305d7de-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "867792a2-0351-43c7-88c9-e28a5305d7de" (UID: "867792a2-0351-43c7-88c9-e28a5305d7de"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:04:09 crc kubenswrapper[5003]: I0126 11:04:09.106435 5003 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/867792a2-0351-43c7-88c9-e28a5305d7de-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 26 11:04:09 crc kubenswrapper[5003]: I0126 11:04:09.106476 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/867792a2-0351-43c7-88c9-e28a5305d7de-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 26 11:04:09 crc kubenswrapper[5003]: I0126 11:04:09.110160 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/867792a2-0351-43c7-88c9-e28a5305d7de-kube-api-access-4mczs" (OuterVolumeSpecName: "kube-api-access-4mczs") pod "867792a2-0351-43c7-88c9-e28a5305d7de" (UID: "867792a2-0351-43c7-88c9-e28a5305d7de"). InnerVolumeSpecName "kube-api-access-4mczs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:04:09 crc kubenswrapper[5003]: I0126 11:04:09.126609 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/867792a2-0351-43c7-88c9-e28a5305d7de-scripts" (OuterVolumeSpecName: "scripts") pod "867792a2-0351-43c7-88c9-e28a5305d7de" (UID: "867792a2-0351-43c7-88c9-e28a5305d7de"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 11:04:09 crc kubenswrapper[5003]: I0126 11:04:09.129658 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/867792a2-0351-43c7-88c9-e28a5305d7de-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "867792a2-0351-43c7-88c9-e28a5305d7de" (UID: "867792a2-0351-43c7-88c9-e28a5305d7de"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:04:09 crc kubenswrapper[5003]: I0126 11:04:09.131103 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/867792a2-0351-43c7-88c9-e28a5305d7de-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "867792a2-0351-43c7-88c9-e28a5305d7de" (UID: "867792a2-0351-43c7-88c9-e28a5305d7de"). InnerVolumeSpecName "dispersionconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:04:09 crc kubenswrapper[5003]: I0126 11:04:09.207660 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/867792a2-0351-43c7-88c9-e28a5305d7de-scripts\") on node \"crc\" DevicePath \"\"" Jan 26 11:04:09 crc kubenswrapper[5003]: I0126 11:04:09.207702 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4mczs\" (UniqueName: \"kubernetes.io/projected/867792a2-0351-43c7-88c9-e28a5305d7de-kube-api-access-4mczs\") on node \"crc\" DevicePath \"\"" Jan 26 11:04:09 crc kubenswrapper[5003]: I0126 11:04:09.207718 5003 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/867792a2-0351-43c7-88c9-e28a5305d7de-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 26 11:04:09 crc kubenswrapper[5003]: I0126 11:04:09.207730 5003 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/867792a2-0351-43c7-88c9-e28a5305d7de-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 26 11:04:09 crc kubenswrapper[5003]: I0126 11:04:09.715800 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-bzq42" event={"ID":"867792a2-0351-43c7-88c9-e28a5305d7de","Type":"ContainerDied","Data":"9c7c6d9f0eda2e319be722ba17c559c3ddf09c203229581b6bb1e8bcc325857e"} Jan 26 11:04:09 crc kubenswrapper[5003]: I0126 11:04:09.715846 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9c7c6d9f0eda2e319be722ba17c559c3ddf09c203229581b6bb1e8bcc325857e" Jan 26 11:04:09 crc kubenswrapper[5003]: I0126 11:04:09.715863 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-bzq42" Jan 26 11:04:10 crc kubenswrapper[5003]: I0126 11:04:10.934696 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-etc-swift\") pod \"swift-storage-0\" (UID: \"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:04:10 crc kubenswrapper[5003]: I0126 11:04:10.945212 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-etc-swift\") pod \"swift-storage-0\" (UID: \"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:04:10 crc kubenswrapper[5003]: I0126 11:04:10.995264 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:04:11 crc kubenswrapper[5003]: I0126 11:04:11.037060 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/85e899d6-b28a-4b65-bbed-90648be93627-etc-swift\") pod \"swift-storage-1\" (UID: \"85e899d6-b28a-4b65-bbed-90648be93627\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:04:11 crc kubenswrapper[5003]: I0126 11:04:11.037126 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f66de5f0-1dc9-497e-828e-563484b9f60e-etc-swift\") pod \"swift-storage-2\" (UID: \"f66de5f0-1dc9-497e-828e-563484b9f60e\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:04:11 crc kubenswrapper[5003]: I0126 11:04:11.042154 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/85e899d6-b28a-4b65-bbed-90648be93627-etc-swift\") pod \"swift-storage-1\" (UID: \"85e899d6-b28a-4b65-bbed-90648be93627\") " pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:04:11 crc kubenswrapper[5003]: I0126 11:04:11.043677 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f66de5f0-1dc9-497e-828e-563484b9f60e-etc-swift\") pod \"swift-storage-2\" (UID: \"f66de5f0-1dc9-497e-828e-563484b9f60e\") " pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:04:11 crc kubenswrapper[5003]: I0126 11:04:11.081716 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:04:11 crc kubenswrapper[5003]: I0126 11:04:11.098979 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:04:11 crc kubenswrapper[5003]: I0126 11:04:11.447802 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Jan 26 11:04:11 crc kubenswrapper[5003]: W0126 11:04:11.454983 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod510b1209_97a2_4a4c_bc62_60d5c9ce6bcd.slice/crio-f8c4785296e0f598b91e15594e02e9fea625d0b829a5ea6f7723c7befc440b9a WatchSource:0}: Error finding container f8c4785296e0f598b91e15594e02e9fea625d0b829a5ea6f7723c7befc440b9a: Status 404 returned error can't find the container with id f8c4785296e0f598b91e15594e02e9fea625d0b829a5ea6f7723c7befc440b9a Jan 26 11:04:11 crc kubenswrapper[5003]: I0126 11:04:11.546424 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-1"] Jan 26 11:04:11 crc kubenswrapper[5003]: I0126 11:04:11.633029 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-2"] Jan 26 11:04:11 crc kubenswrapper[5003]: W0126 11:04:11.639993 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf66de5f0_1dc9_497e_828e_563484b9f60e.slice/crio-52abf7d4ccdebca66eca1b67b807063a14f8054ac72e6274b7585334016a8a51 WatchSource:0}: Error finding container 52abf7d4ccdebca66eca1b67b807063a14f8054ac72e6274b7585334016a8a51: Status 404 returned error can't find the container with id 52abf7d4ccdebca66eca1b67b807063a14f8054ac72e6274b7585334016a8a51 Jan 26 11:04:11 crc kubenswrapper[5003]: I0126 11:04:11.740445 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"85e899d6-b28a-4b65-bbed-90648be93627","Type":"ContainerStarted","Data":"23f16ba6e5ba6a363d0ee9ae965d1b10aa7a42784a6480cb45492d47033a7ba8"} Jan 26 11:04:11 crc kubenswrapper[5003]: I0126 11:04:11.740538 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"85e899d6-b28a-4b65-bbed-90648be93627","Type":"ContainerStarted","Data":"a669710f32b9961ee95e2e12ff38543dd5d2c3533329080d2fb73e90f0e98e33"} Jan 26 11:04:11 crc kubenswrapper[5003]: I0126 11:04:11.742109 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f66de5f0-1dc9-497e-828e-563484b9f60e","Type":"ContainerStarted","Data":"52abf7d4ccdebca66eca1b67b807063a14f8054ac72e6274b7585334016a8a51"} Jan 26 11:04:11 crc kubenswrapper[5003]: I0126 11:04:11.745918 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd","Type":"ContainerStarted","Data":"cdbd7ccfbd2dda5c8e914cfc64d48a202c645b60787abb3380799caf918f44aa"} Jan 26 11:04:11 crc kubenswrapper[5003]: I0126 11:04:11.745982 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd","Type":"ContainerStarted","Data":"f8c4785296e0f598b91e15594e02e9fea625d0b829a5ea6f7723c7befc440b9a"} Jan 26 11:04:12 crc kubenswrapper[5003]: I0126 11:04:12.260153 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/168289f3-76bd-4518-8672-b02c64df8a27-etc-swift\") pod \"swift-proxy-67f6cc5479-w87g5\" (UID: \"168289f3-76bd-4518-8672-b02c64df8a27\") " 
pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" Jan 26 11:04:12 crc kubenswrapper[5003]: I0126 11:04:12.268595 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/168289f3-76bd-4518-8672-b02c64df8a27-etc-swift\") pod \"swift-proxy-67f6cc5479-w87g5\" (UID: \"168289f3-76bd-4518-8672-b02c64df8a27\") " pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" Jan 26 11:04:12 crc kubenswrapper[5003]: I0126 11:04:12.291405 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" Jan 26 11:04:12 crc kubenswrapper[5003]: I0126 11:04:12.598951 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5"] Jan 26 11:04:12 crc kubenswrapper[5003]: W0126 11:04:12.603811 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod168289f3_76bd_4518_8672_b02c64df8a27.slice/crio-edfae511caa0893bfd2366c7c38bf5f14741e8e5868e0947edb4d2b707b6ec1f WatchSource:0}: Error finding container edfae511caa0893bfd2366c7c38bf5f14741e8e5868e0947edb4d2b707b6ec1f: Status 404 returned error can't find the container with id edfae511caa0893bfd2366c7c38bf5f14741e8e5868e0947edb4d2b707b6ec1f Jan 26 11:04:12 crc kubenswrapper[5003]: I0126 11:04:12.831201 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd","Type":"ContainerStarted","Data":"2fc3c87be59a5ec7ba8a74b787b40113f9389d112d941fb7c2ed672c43b2edfd"} Jan 26 11:04:12 crc kubenswrapper[5003]: I0126 11:04:12.831269 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd","Type":"ContainerStarted","Data":"8223fc509969f6776b993e8185f9b069d5c9f086609a260402a84d4770684a11"} Jan 26 11:04:12 crc kubenswrapper[5003]: I0126 11:04:12.831301 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd","Type":"ContainerStarted","Data":"4ad9c01357ab348dfff940847ee95e8e56b30bd4d45d9f4e4af91de556a91141"} Jan 26 11:04:12 crc kubenswrapper[5003]: I0126 11:04:12.831329 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd","Type":"ContainerStarted","Data":"cd3ec3973c0fdea5b314fe57cccfee96424716c43decd2b932dcb5a277f50475"} Jan 26 11:04:12 crc kubenswrapper[5003]: I0126 11:04:12.891635 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"85e899d6-b28a-4b65-bbed-90648be93627","Type":"ContainerStarted","Data":"c91ab6152d5ea9d28ccd73b6974a0f4d3883709c99852ba25bec0db8c9a6e5ac"} Jan 26 11:04:12 crc kubenswrapper[5003]: I0126 11:04:12.891697 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"85e899d6-b28a-4b65-bbed-90648be93627","Type":"ContainerStarted","Data":"e34cb9c5f822beead5263fd88e158f66d3b44e29ea1a45b39948e974da9d3afa"} Jan 26 11:04:12 crc kubenswrapper[5003]: I0126 11:04:12.891714 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"85e899d6-b28a-4b65-bbed-90648be93627","Type":"ContainerStarted","Data":"1d28eb610f8b253b7a6064cd6c60e01513782dca50d398abb69cf5666587bc32"} Jan 26 11:04:12 crc kubenswrapper[5003]: I0126 
11:04:12.891726 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"85e899d6-b28a-4b65-bbed-90648be93627","Type":"ContainerStarted","Data":"b07a59c52b0cbdde0c15d799dc68288f4ee14a18c067573b81f96b3508b94432"} Jan 26 11:04:12 crc kubenswrapper[5003]: I0126 11:04:12.934740 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f66de5f0-1dc9-497e-828e-563484b9f60e","Type":"ContainerStarted","Data":"76c91568fed3d3be7b0d221f08c5227712a61135c9639cbdc2b07a9d4d24752e"} Jan 26 11:04:12 crc kubenswrapper[5003]: I0126 11:04:12.934791 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f66de5f0-1dc9-497e-828e-563484b9f60e","Type":"ContainerStarted","Data":"4384daada70a6c99d7a2ffb160ce6ab354e3942cd7ba8daebd42308333183a53"} Jan 26 11:04:12 crc kubenswrapper[5003]: I0126 11:04:12.934806 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f66de5f0-1dc9-497e-828e-563484b9f60e","Type":"ContainerStarted","Data":"5e93c0b13111352b01ab24a7699b92113b6bf3ebf4cf55453135ded7ea36f320"} Jan 26 11:04:12 crc kubenswrapper[5003]: I0126 11:04:12.934815 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f66de5f0-1dc9-497e-828e-563484b9f60e","Type":"ContainerStarted","Data":"fa643b20c3314cb09c04e05aa736116433ac48b341a87f5e0541497454d90ae1"} Jan 26 11:04:12 crc kubenswrapper[5003]: I0126 11:04:12.946501 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" event={"ID":"168289f3-76bd-4518-8672-b02c64df8a27","Type":"ContainerStarted","Data":"edfae511caa0893bfd2366c7c38bf5f14741e8e5868e0947edb4d2b707b6ec1f"} Jan 26 11:04:13 crc kubenswrapper[5003]: I0126 11:04:13.959988 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd","Type":"ContainerStarted","Data":"4c70dc4068dca5aa538646b5a5cf67b9678e3c5da2f2b07c409ac6f6c846c7e7"} Jan 26 11:04:13 crc kubenswrapper[5003]: I0126 11:04:13.960439 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd","Type":"ContainerStarted","Data":"dcf0b5ce28344896d5ad2a9cb5419af7078cf89fa9ce1120a5322d2a61e9b335"} Jan 26 11:04:13 crc kubenswrapper[5003]: I0126 11:04:13.960453 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd","Type":"ContainerStarted","Data":"e240db621933c2cd325c34006ef2c68ab817226ffc8733b8fcedc9e3f09f14fd"} Jan 26 11:04:13 crc kubenswrapper[5003]: I0126 11:04:13.968616 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"85e899d6-b28a-4b65-bbed-90648be93627","Type":"ContainerStarted","Data":"bff9c806ed7cdb75c1f4838b9a094c93dc75ade4c30c0b29d24a1cf2577a3e99"} Jan 26 11:04:13 crc kubenswrapper[5003]: I0126 11:04:13.969233 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"85e899d6-b28a-4b65-bbed-90648be93627","Type":"ContainerStarted","Data":"4afe6b66ed2fd13ad641a9847b636ca80bb77c44eeae3e5205cb9852b91e1c24"} Jan 26 11:04:13 crc kubenswrapper[5003]: I0126 11:04:13.969626 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" 
event={"ID":"85e899d6-b28a-4b65-bbed-90648be93627","Type":"ContainerStarted","Data":"3d339f409ffcb5325ad919da7c5d4cbda84d02d5b1e4439f03e3c1143cf7fe72"} Jan 26 11:04:13 crc kubenswrapper[5003]: I0126 11:04:13.974258 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f66de5f0-1dc9-497e-828e-563484b9f60e","Type":"ContainerStarted","Data":"8640f61b9595e8cc65355db36b00c634c8f37f6bedb38c5576effa879d69cf8f"} Jan 26 11:04:13 crc kubenswrapper[5003]: I0126 11:04:13.974326 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f66de5f0-1dc9-497e-828e-563484b9f60e","Type":"ContainerStarted","Data":"8d6c329a9bc40fbf978e8c86849f93750f8ea4257e5419fc4efd17dfa1c42ba4"} Jan 26 11:04:13 crc kubenswrapper[5003]: I0126 11:04:13.974340 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f66de5f0-1dc9-497e-828e-563484b9f60e","Type":"ContainerStarted","Data":"9a891062f08c6042aebdb2b82937f07a228467b3ed7709963fb06d4fa378eef3"} Jan 26 11:04:13 crc kubenswrapper[5003]: I0126 11:04:13.978474 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" event={"ID":"168289f3-76bd-4518-8672-b02c64df8a27","Type":"ContainerStarted","Data":"4369e1000c54f50e9b8dac24d89ef63ebe424904ee457f369e66ef336eb4fc5d"} Jan 26 11:04:13 crc kubenswrapper[5003]: I0126 11:04:13.978507 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" event={"ID":"168289f3-76bd-4518-8672-b02c64df8a27","Type":"ContainerStarted","Data":"b9a2c76b4ef7bf090acc9d77429d83605d958090a843ebfb02e4799f93758cf2"} Jan 26 11:04:13 crc kubenswrapper[5003]: I0126 11:04:13.978661 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" Jan 26 11:04:14 crc kubenswrapper[5003]: I0126 11:04:14.002951 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" podStartSLOduration=18.002931482 podStartE2EDuration="18.002931482s" podCreationTimestamp="2026-01-26 11:03:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 11:04:14.000613586 +0000 UTC m=+1269.541839157" watchObservedRunningTime="2026-01-26 11:04:14.002931482 +0000 UTC m=+1269.544157113" Jan 26 11:04:15 crc kubenswrapper[5003]: I0126 11:04:15.075811 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"85e899d6-b28a-4b65-bbed-90648be93627","Type":"ContainerStarted","Data":"f9a98aa39ad63d3e3cd260454a7a9de04082531369d39733e70305ac416cea9e"} Jan 26 11:04:15 crc kubenswrapper[5003]: I0126 11:04:15.075872 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"85e899d6-b28a-4b65-bbed-90648be93627","Type":"ContainerStarted","Data":"97a7f4949dee21f7325aece319ca46ed43183cb5ea18e9253c3f974b31284e87"} Jan 26 11:04:15 crc kubenswrapper[5003]: I0126 11:04:15.075886 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"85e899d6-b28a-4b65-bbed-90648be93627","Type":"ContainerStarted","Data":"f646834e6851b96fb382e1ef2015754dc2c7b2787e5b226395b64e3b11d176a1"} Jan 26 11:04:15 crc kubenswrapper[5003]: I0126 11:04:15.075902 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="swift-kuttl-tests/swift-storage-1" event={"ID":"85e899d6-b28a-4b65-bbed-90648be93627","Type":"ContainerStarted","Data":"5c9caba665ad8644e75784ffcbd69d930eacb480b36e91cfd365eb7c9c96ae6f"} Jan 26 11:04:15 crc kubenswrapper[5003]: I0126 11:04:15.075914 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f66de5f0-1dc9-497e-828e-563484b9f60e","Type":"ContainerStarted","Data":"f8814d625f7117c51d0aec533953e4fa6b9b8b683beab027deb63801d292ffb2"} Jan 26 11:04:15 crc kubenswrapper[5003]: I0126 11:04:15.075928 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f66de5f0-1dc9-497e-828e-563484b9f60e","Type":"ContainerStarted","Data":"a10effcebd5f35f3664ee774bfb75160c87673f9f08ab86e1ccd324075153ba4"} Jan 26 11:04:15 crc kubenswrapper[5003]: I0126 11:04:15.075940 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f66de5f0-1dc9-497e-828e-563484b9f60e","Type":"ContainerStarted","Data":"ce19ee917cb68ccf5d427cd056c695990454baa0d29ce04a1105dbd984274d51"} Jan 26 11:04:15 crc kubenswrapper[5003]: I0126 11:04:15.075953 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f66de5f0-1dc9-497e-828e-563484b9f60e","Type":"ContainerStarted","Data":"73e8ebe063e7176c37d9fc88e20295c086ec115cb05b91434693bd33f1f49da2"} Jan 26 11:04:15 crc kubenswrapper[5003]: I0126 11:04:15.143898 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd","Type":"ContainerStarted","Data":"bed31d1f568e801654716e146fb9ddb44ea521879275a28f8dde39238b54ba0a"} Jan 26 11:04:15 crc kubenswrapper[5003]: I0126 11:04:15.143963 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd","Type":"ContainerStarted","Data":"c44a317d8182394990b6b241828022a2a0a8d9496dc2292f96a1cc72b55ade51"} Jan 26 11:04:15 crc kubenswrapper[5003]: I0126 11:04:15.143975 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd","Type":"ContainerStarted","Data":"178bd810c5348d96a9efd14bc38890c354eeb36dd59dc06f183129fe85ea1d5f"} Jan 26 11:04:15 crc kubenswrapper[5003]: I0126 11:04:15.143983 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd","Type":"ContainerStarted","Data":"aa3f16901f9fac2eeb4f0e98481575cc714ec4e56ed459a337209d6cae3ac9bd"} Jan 26 11:04:15 crc kubenswrapper[5003]: I0126 11:04:15.144164 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" Jan 26 11:04:16 crc kubenswrapper[5003]: I0126 11:04:16.158884 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd","Type":"ContainerStarted","Data":"b1a0e343cad805ae8c6408da7e1ec9232470d5e94d9a936e6615f7c3187c00c6"} Jan 26 11:04:16 crc kubenswrapper[5003]: I0126 11:04:16.159196 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd","Type":"ContainerStarted","Data":"18e58aa23f76d39065cf7ec27c37a7736d755ff1746ffe71aa5eb4d936e040d8"} Jan 26 11:04:16 crc kubenswrapper[5003]: I0126 11:04:16.159207 5003 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd","Type":"ContainerStarted","Data":"07bd1aaf9672dd7b6a3a855467320342869a84bbfd503485f147fb074f7b62da"} Jan 26 11:04:16 crc kubenswrapper[5003]: I0126 11:04:16.164756 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"85e899d6-b28a-4b65-bbed-90648be93627","Type":"ContainerStarted","Data":"527c335c96bbde9f354d78b445883d59eef8e57fa945fe3616c7ffa9bbd6770c"} Jan 26 11:04:16 crc kubenswrapper[5003]: I0126 11:04:16.164791 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"85e899d6-b28a-4b65-bbed-90648be93627","Type":"ContainerStarted","Data":"b415c6a54f47f05402c3feef392d112113942b22d937deaa338e1bd319c82b54"} Jan 26 11:04:16 crc kubenswrapper[5003]: I0126 11:04:16.164802 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"85e899d6-b28a-4b65-bbed-90648be93627","Type":"ContainerStarted","Data":"eab743ecc90895a84bc0ce0d7cc79df23a1ceb78f36452966772b7ab9eab3467"} Jan 26 11:04:16 crc kubenswrapper[5003]: I0126 11:04:16.177386 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f66de5f0-1dc9-497e-828e-563484b9f60e","Type":"ContainerStarted","Data":"6875290abeab157e08486b92663f5ae51d75ce9c4d1edff0a97352af62aa9258"} Jan 26 11:04:16 crc kubenswrapper[5003]: I0126 11:04:16.177453 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f66de5f0-1dc9-497e-828e-563484b9f60e","Type":"ContainerStarted","Data":"7b458a16e007ce6f0cee91e56dbf993488bc1c68a09fe8963fde8a000508741b"} Jan 26 11:04:16 crc kubenswrapper[5003]: I0126 11:04:16.177468 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f66de5f0-1dc9-497e-828e-563484b9f60e","Type":"ContainerStarted","Data":"f12fcca496d3abd1c909b9cf17dd411cfdd166e83214f3c1c87091761b52a212"} Jan 26 11:04:16 crc kubenswrapper[5003]: I0126 11:04:16.177483 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f66de5f0-1dc9-497e-828e-563484b9f60e","Type":"ContainerStarted","Data":"162e1b56b56be0a20546eddac9ef658defb1efdb1e957f4ec5c5844d879ac13b"} Jan 26 11:04:16 crc kubenswrapper[5003]: I0126 11:04:16.202716 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-storage-0" podStartSLOduration=22.202690316 podStartE2EDuration="22.202690316s" podCreationTimestamp="2026-01-26 11:03:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 11:04:16.198167396 +0000 UTC m=+1271.739392957" watchObservedRunningTime="2026-01-26 11:04:16.202690316 +0000 UTC m=+1271.743915877" Jan 26 11:04:16 crc kubenswrapper[5003]: I0126 11:04:16.240740 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-storage-1" podStartSLOduration=22.240713995 podStartE2EDuration="22.240713995s" podCreationTimestamp="2026-01-26 11:03:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 11:04:16.233928361 +0000 UTC m=+1271.775153922" watchObservedRunningTime="2026-01-26 11:04:16.240713995 +0000 UTC m=+1271.781939556" Jan 26 
11:04:17 crc kubenswrapper[5003]: I0126 11:04:17.298535 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" Jan 26 11:04:17 crc kubenswrapper[5003]: I0126 11:04:17.323087 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-storage-2" podStartSLOduration=23.323067504 podStartE2EDuration="23.323067504s" podCreationTimestamp="2026-01-26 11:03:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 11:04:16.28452605 +0000 UTC m=+1271.825751621" watchObservedRunningTime="2026-01-26 11:04:17.323067504 +0000 UTC m=+1272.864293055" Jan 26 11:04:22 crc kubenswrapper[5003]: I0126 11:04:22.294781 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" Jan 26 11:04:24 crc kubenswrapper[5003]: I0126 11:04:24.045953 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-lsfgz"] Jan 26 11:04:24 crc kubenswrapper[5003]: E0126 11:04:24.046612 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="867792a2-0351-43c7-88c9-e28a5305d7de" containerName="swift-ring-rebalance" Jan 26 11:04:24 crc kubenswrapper[5003]: I0126 11:04:24.046626 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="867792a2-0351-43c7-88c9-e28a5305d7de" containerName="swift-ring-rebalance" Jan 26 11:04:24 crc kubenswrapper[5003]: I0126 11:04:24.046797 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="867792a2-0351-43c7-88c9-e28a5305d7de" containerName="swift-ring-rebalance" Jan 26 11:04:24 crc kubenswrapper[5003]: I0126 11:04:24.047384 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-lsfgz" Jan 26 11:04:24 crc kubenswrapper[5003]: I0126 11:04:24.050422 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-scripts" Jan 26 11:04:24 crc kubenswrapper[5003]: I0126 11:04:24.056826 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-config-data" Jan 26 11:04:24 crc kubenswrapper[5003]: I0126 11:04:24.060104 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-lsfgz"] Jan 26 11:04:24 crc kubenswrapper[5003]: I0126 11:04:24.167549 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-swiftconf\") pod \"swift-ring-rebalance-debug-lsfgz\" (UID: \"4cf37fc6-87a1-4028-abfe-a9b378b3bd89\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lsfgz" Jan 26 11:04:24 crc kubenswrapper[5003]: I0126 11:04:24.167622 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xljz\" (UniqueName: \"kubernetes.io/projected/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-kube-api-access-6xljz\") pod \"swift-ring-rebalance-debug-lsfgz\" (UID: \"4cf37fc6-87a1-4028-abfe-a9b378b3bd89\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lsfgz" Jan 26 11:04:24 crc kubenswrapper[5003]: I0126 11:04:24.167651 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-etc-swift\") pod \"swift-ring-rebalance-debug-lsfgz\" (UID: \"4cf37fc6-87a1-4028-abfe-a9b378b3bd89\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lsfgz" Jan 26 11:04:24 crc kubenswrapper[5003]: I0126 11:04:24.167840 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-scripts\") pod \"swift-ring-rebalance-debug-lsfgz\" (UID: \"4cf37fc6-87a1-4028-abfe-a9b378b3bd89\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lsfgz" Jan 26 11:04:24 crc kubenswrapper[5003]: I0126 11:04:24.167926 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-ring-data-devices\") pod \"swift-ring-rebalance-debug-lsfgz\" (UID: \"4cf37fc6-87a1-4028-abfe-a9b378b3bd89\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lsfgz" Jan 26 11:04:24 crc kubenswrapper[5003]: I0126 11:04:24.168038 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-dispersionconf\") pod \"swift-ring-rebalance-debug-lsfgz\" (UID: \"4cf37fc6-87a1-4028-abfe-a9b378b3bd89\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lsfgz" Jan 26 11:04:24 crc kubenswrapper[5003]: I0126 11:04:24.270398 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-swiftconf\") pod \"swift-ring-rebalance-debug-lsfgz\" (UID: \"4cf37fc6-87a1-4028-abfe-a9b378b3bd89\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lsfgz" Jan 26 11:04:24 crc 
kubenswrapper[5003]: I0126 11:04:24.270458 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xljz\" (UniqueName: \"kubernetes.io/projected/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-kube-api-access-6xljz\") pod \"swift-ring-rebalance-debug-lsfgz\" (UID: \"4cf37fc6-87a1-4028-abfe-a9b378b3bd89\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lsfgz" Jan 26 11:04:24 crc kubenswrapper[5003]: I0126 11:04:24.270485 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-etc-swift\") pod \"swift-ring-rebalance-debug-lsfgz\" (UID: \"4cf37fc6-87a1-4028-abfe-a9b378b3bd89\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lsfgz" Jan 26 11:04:24 crc kubenswrapper[5003]: I0126 11:04:24.270544 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-scripts\") pod \"swift-ring-rebalance-debug-lsfgz\" (UID: \"4cf37fc6-87a1-4028-abfe-a9b378b3bd89\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lsfgz" Jan 26 11:04:24 crc kubenswrapper[5003]: I0126 11:04:24.270572 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-ring-data-devices\") pod \"swift-ring-rebalance-debug-lsfgz\" (UID: \"4cf37fc6-87a1-4028-abfe-a9b378b3bd89\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lsfgz" Jan 26 11:04:24 crc kubenswrapper[5003]: I0126 11:04:24.270613 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-dispersionconf\") pod \"swift-ring-rebalance-debug-lsfgz\" (UID: \"4cf37fc6-87a1-4028-abfe-a9b378b3bd89\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lsfgz" Jan 26 11:04:24 crc kubenswrapper[5003]: I0126 11:04:24.271320 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-etc-swift\") pod \"swift-ring-rebalance-debug-lsfgz\" (UID: \"4cf37fc6-87a1-4028-abfe-a9b378b3bd89\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lsfgz" Jan 26 11:04:24 crc kubenswrapper[5003]: I0126 11:04:24.271843 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-ring-data-devices\") pod \"swift-ring-rebalance-debug-lsfgz\" (UID: \"4cf37fc6-87a1-4028-abfe-a9b378b3bd89\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lsfgz" Jan 26 11:04:24 crc kubenswrapper[5003]: I0126 11:04:24.271956 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-scripts\") pod \"swift-ring-rebalance-debug-lsfgz\" (UID: \"4cf37fc6-87a1-4028-abfe-a9b378b3bd89\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lsfgz" Jan 26 11:04:24 crc kubenswrapper[5003]: I0126 11:04:24.277578 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-dispersionconf\") pod \"swift-ring-rebalance-debug-lsfgz\" (UID: \"4cf37fc6-87a1-4028-abfe-a9b378b3bd89\") " 
pod="swift-kuttl-tests/swift-ring-rebalance-debug-lsfgz" Jan 26 11:04:24 crc kubenswrapper[5003]: I0126 11:04:24.278744 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-swiftconf\") pod \"swift-ring-rebalance-debug-lsfgz\" (UID: \"4cf37fc6-87a1-4028-abfe-a9b378b3bd89\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lsfgz" Jan 26 11:04:24 crc kubenswrapper[5003]: I0126 11:04:24.294339 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xljz\" (UniqueName: \"kubernetes.io/projected/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-kube-api-access-6xljz\") pod \"swift-ring-rebalance-debug-lsfgz\" (UID: \"4cf37fc6-87a1-4028-abfe-a9b378b3bd89\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lsfgz" Jan 26 11:04:24 crc kubenswrapper[5003]: I0126 11:04:24.373123 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-lsfgz" Jan 26 11:04:24 crc kubenswrapper[5003]: I0126 11:04:24.642572 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-lsfgz"] Jan 26 11:04:24 crc kubenswrapper[5003]: W0126 11:04:24.659595 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4cf37fc6_87a1_4028_abfe_a9b378b3bd89.slice/crio-8c058e3fc55f9f996c73522f6e1183ab92885ba6706a5075377ba0faf18772f3 WatchSource:0}: Error finding container 8c058e3fc55f9f996c73522f6e1183ab92885ba6706a5075377ba0faf18772f3: Status 404 returned error can't find the container with id 8c058e3fc55f9f996c73522f6e1183ab92885ba6706a5075377ba0faf18772f3 Jan 26 11:04:25 crc kubenswrapper[5003]: I0126 11:04:25.257929 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-lsfgz" event={"ID":"4cf37fc6-87a1-4028-abfe-a9b378b3bd89","Type":"ContainerStarted","Data":"b6fac47dc81b0501e6ce5074cb015e380e678269936ac3af998563cdda242697"} Jan 26 11:04:25 crc kubenswrapper[5003]: I0126 11:04:25.258221 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-lsfgz" event={"ID":"4cf37fc6-87a1-4028-abfe-a9b378b3bd89","Type":"ContainerStarted","Data":"8c058e3fc55f9f996c73522f6e1183ab92885ba6706a5075377ba0faf18772f3"} Jan 26 11:04:25 crc kubenswrapper[5003]: I0126 11:04:25.283370 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-ring-rebalance-debug-lsfgz" podStartSLOduration=1.2833490269999999 podStartE2EDuration="1.283349027s" podCreationTimestamp="2026-01-26 11:04:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 11:04:25.27438028 +0000 UTC m=+1280.815605851" watchObservedRunningTime="2026-01-26 11:04:25.283349027 +0000 UTC m=+1280.824574588" Jan 26 11:04:27 crc kubenswrapper[5003]: I0126 11:04:27.280898 5003 generic.go:334] "Generic (PLEG): container finished" podID="4cf37fc6-87a1-4028-abfe-a9b378b3bd89" containerID="b6fac47dc81b0501e6ce5074cb015e380e678269936ac3af998563cdda242697" exitCode=0 Jan 26 11:04:27 crc kubenswrapper[5003]: I0126 11:04:27.280968 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-lsfgz" 
event={"ID":"4cf37fc6-87a1-4028-abfe-a9b378b3bd89","Type":"ContainerDied","Data":"b6fac47dc81b0501e6ce5074cb015e380e678269936ac3af998563cdda242697"} Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.608897 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-lsfgz" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.636821 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6xljz\" (UniqueName: \"kubernetes.io/projected/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-kube-api-access-6xljz\") pod \"4cf37fc6-87a1-4028-abfe-a9b378b3bd89\" (UID: \"4cf37fc6-87a1-4028-abfe-a9b378b3bd89\") " Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.636888 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-etc-swift\") pod \"4cf37fc6-87a1-4028-abfe-a9b378b3bd89\" (UID: \"4cf37fc6-87a1-4028-abfe-a9b378b3bd89\") " Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.636915 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-scripts\") pod \"4cf37fc6-87a1-4028-abfe-a9b378b3bd89\" (UID: \"4cf37fc6-87a1-4028-abfe-a9b378b3bd89\") " Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.636955 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-swiftconf\") pod \"4cf37fc6-87a1-4028-abfe-a9b378b3bd89\" (UID: \"4cf37fc6-87a1-4028-abfe-a9b378b3bd89\") " Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.637052 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-ring-data-devices\") pod \"4cf37fc6-87a1-4028-abfe-a9b378b3bd89\" (UID: \"4cf37fc6-87a1-4028-abfe-a9b378b3bd89\") " Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.637075 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-dispersionconf\") pod \"4cf37fc6-87a1-4028-abfe-a9b378b3bd89\" (UID: \"4cf37fc6-87a1-4028-abfe-a9b378b3bd89\") " Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.641959 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "4cf37fc6-87a1-4028-abfe-a9b378b3bd89" (UID: "4cf37fc6-87a1-4028-abfe-a9b378b3bd89"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.643403 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "4cf37fc6-87a1-4028-abfe-a9b378b3bd89" (UID: "4cf37fc6-87a1-4028-abfe-a9b378b3bd89"). InnerVolumeSpecName "ring-data-devices". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.648327 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-lsfgz"] Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.658622 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-kube-api-access-6xljz" (OuterVolumeSpecName: "kube-api-access-6xljz") pod "4cf37fc6-87a1-4028-abfe-a9b378b3bd89" (UID: "4cf37fc6-87a1-4028-abfe-a9b378b3bd89"). InnerVolumeSpecName "kube-api-access-6xljz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.665152 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-lsfgz"] Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.665539 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-scripts" (OuterVolumeSpecName: "scripts") pod "4cf37fc6-87a1-4028-abfe-a9b378b3bd89" (UID: "4cf37fc6-87a1-4028-abfe-a9b378b3bd89"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.666147 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "4cf37fc6-87a1-4028-abfe-a9b378b3bd89" (UID: "4cf37fc6-87a1-4028-abfe-a9b378b3bd89"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.670334 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "4cf37fc6-87a1-4028-abfe-a9b378b3bd89" (UID: "4cf37fc6-87a1-4028-abfe-a9b378b3bd89"). InnerVolumeSpecName "dispersionconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.739388 5003 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.739429 5003 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.739444 5003 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.739456 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6xljz\" (UniqueName: \"kubernetes.io/projected/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-kube-api-access-6xljz\") on node \"crc\" DevicePath \"\"" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.739469 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.739481 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4cf37fc6-87a1-4028-abfe-a9b378b3bd89-scripts\") on node \"crc\" DevicePath \"\"" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.815892 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-4rfk6"] Jan 26 11:04:28 crc kubenswrapper[5003]: E0126 11:04:28.818167 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4cf37fc6-87a1-4028-abfe-a9b378b3bd89" containerName="swift-ring-rebalance" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.818192 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="4cf37fc6-87a1-4028-abfe-a9b378b3bd89" containerName="swift-ring-rebalance" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.818391 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="4cf37fc6-87a1-4028-abfe-a9b378b3bd89" containerName="swift-ring-rebalance" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.819008 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-4rfk6" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.829586 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-4rfk6"] Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.840409 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-swiftconf\") pod \"swift-ring-rebalance-debug-4rfk6\" (UID: \"d668b0b6-441c-481e-b6ca-0fcf6fdd0300\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-4rfk6" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.840458 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-scripts\") pod \"swift-ring-rebalance-debug-4rfk6\" (UID: \"d668b0b6-441c-481e-b6ca-0fcf6fdd0300\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-4rfk6" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.840518 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-ring-data-devices\") pod \"swift-ring-rebalance-debug-4rfk6\" (UID: \"d668b0b6-441c-481e-b6ca-0fcf6fdd0300\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-4rfk6" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.840560 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npnmq\" (UniqueName: \"kubernetes.io/projected/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-kube-api-access-npnmq\") pod \"swift-ring-rebalance-debug-4rfk6\" (UID: \"d668b0b6-441c-481e-b6ca-0fcf6fdd0300\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-4rfk6" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.840591 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-dispersionconf\") pod \"swift-ring-rebalance-debug-4rfk6\" (UID: \"d668b0b6-441c-481e-b6ca-0fcf6fdd0300\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-4rfk6" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.840629 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-etc-swift\") pod \"swift-ring-rebalance-debug-4rfk6\" (UID: \"d668b0b6-441c-481e-b6ca-0fcf6fdd0300\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-4rfk6" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.942552 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-swiftconf\") pod \"swift-ring-rebalance-debug-4rfk6\" (UID: \"d668b0b6-441c-481e-b6ca-0fcf6fdd0300\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-4rfk6" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.943194 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-scripts\") pod \"swift-ring-rebalance-debug-4rfk6\" (UID: \"d668b0b6-441c-481e-b6ca-0fcf6fdd0300\") " 
pod="swift-kuttl-tests/swift-ring-rebalance-debug-4rfk6" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.943437 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-ring-data-devices\") pod \"swift-ring-rebalance-debug-4rfk6\" (UID: \"d668b0b6-441c-481e-b6ca-0fcf6fdd0300\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-4rfk6" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.943542 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npnmq\" (UniqueName: \"kubernetes.io/projected/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-kube-api-access-npnmq\") pod \"swift-ring-rebalance-debug-4rfk6\" (UID: \"d668b0b6-441c-481e-b6ca-0fcf6fdd0300\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-4rfk6" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.943631 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-dispersionconf\") pod \"swift-ring-rebalance-debug-4rfk6\" (UID: \"d668b0b6-441c-481e-b6ca-0fcf6fdd0300\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-4rfk6" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.943744 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-etc-swift\") pod \"swift-ring-rebalance-debug-4rfk6\" (UID: \"d668b0b6-441c-481e-b6ca-0fcf6fdd0300\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-4rfk6" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.944083 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-scripts\") pod \"swift-ring-rebalance-debug-4rfk6\" (UID: \"d668b0b6-441c-481e-b6ca-0fcf6fdd0300\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-4rfk6" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.944653 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-etc-swift\") pod \"swift-ring-rebalance-debug-4rfk6\" (UID: \"d668b0b6-441c-481e-b6ca-0fcf6fdd0300\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-4rfk6" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.944727 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-ring-data-devices\") pod \"swift-ring-rebalance-debug-4rfk6\" (UID: \"d668b0b6-441c-481e-b6ca-0fcf6fdd0300\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-4rfk6" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.949464 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-dispersionconf\") pod \"swift-ring-rebalance-debug-4rfk6\" (UID: \"d668b0b6-441c-481e-b6ca-0fcf6fdd0300\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-4rfk6" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.955237 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-swiftconf\") pod \"swift-ring-rebalance-debug-4rfk6\" (UID: 
\"d668b0b6-441c-481e-b6ca-0fcf6fdd0300\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-4rfk6" Jan 26 11:04:28 crc kubenswrapper[5003]: I0126 11:04:28.963031 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npnmq\" (UniqueName: \"kubernetes.io/projected/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-kube-api-access-npnmq\") pod \"swift-ring-rebalance-debug-4rfk6\" (UID: \"d668b0b6-441c-481e-b6ca-0fcf6fdd0300\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-4rfk6" Jan 26 11:04:29 crc kubenswrapper[5003]: I0126 11:04:29.012224 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4cf37fc6-87a1-4028-abfe-a9b378b3bd89" path="/var/lib/kubelet/pods/4cf37fc6-87a1-4028-abfe-a9b378b3bd89/volumes" Jan 26 11:04:29 crc kubenswrapper[5003]: I0126 11:04:29.143665 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-4rfk6" Jan 26 11:04:29 crc kubenswrapper[5003]: I0126 11:04:29.303780 5003 scope.go:117] "RemoveContainer" containerID="b6fac47dc81b0501e6ce5074cb015e380e678269936ac3af998563cdda242697" Jan 26 11:04:29 crc kubenswrapper[5003]: I0126 11:04:29.303887 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-lsfgz" Jan 26 11:04:29 crc kubenswrapper[5003]: I0126 11:04:29.416258 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-4rfk6"] Jan 26 11:04:30 crc kubenswrapper[5003]: I0126 11:04:30.313312 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-4rfk6" event={"ID":"d668b0b6-441c-481e-b6ca-0fcf6fdd0300","Type":"ContainerStarted","Data":"639652446ecfac50132457191c7713439a62795a8fe4b2ef83275373cb460471"} Jan 26 11:04:30 crc kubenswrapper[5003]: I0126 11:04:30.313589 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-4rfk6" event={"ID":"d668b0b6-441c-481e-b6ca-0fcf6fdd0300","Type":"ContainerStarted","Data":"5b6e9aa3b706b12b0fdfb046dcd16fadaa959119f435f4951226dc9398dfe124"} Jan 26 11:04:30 crc kubenswrapper[5003]: I0126 11:04:30.340507 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-ring-rebalance-debug-4rfk6" podStartSLOduration=2.340486754 podStartE2EDuration="2.340486754s" podCreationTimestamp="2026-01-26 11:04:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 11:04:30.333116203 +0000 UTC m=+1285.874341784" watchObservedRunningTime="2026-01-26 11:04:30.340486754 +0000 UTC m=+1285.881712315" Jan 26 11:04:31 crc kubenswrapper[5003]: I0126 11:04:31.326613 5003 generic.go:334] "Generic (PLEG): container finished" podID="d668b0b6-441c-481e-b6ca-0fcf6fdd0300" containerID="639652446ecfac50132457191c7713439a62795a8fe4b2ef83275373cb460471" exitCode=0 Jan 26 11:04:31 crc kubenswrapper[5003]: I0126 11:04:31.326664 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-4rfk6" event={"ID":"d668b0b6-441c-481e-b6ca-0fcf6fdd0300","Type":"ContainerDied","Data":"639652446ecfac50132457191c7713439a62795a8fe4b2ef83275373cb460471"} Jan 26 11:04:32 crc kubenswrapper[5003]: I0126 11:04:32.647678 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-4rfk6" Jan 26 11:04:32 crc kubenswrapper[5003]: I0126 11:04:32.681025 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-4rfk6"] Jan 26 11:04:32 crc kubenswrapper[5003]: I0126 11:04:32.687572 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-4rfk6"] Jan 26 11:04:32 crc kubenswrapper[5003]: I0126 11:04:32.800730 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-ring-data-devices\") pod \"d668b0b6-441c-481e-b6ca-0fcf6fdd0300\" (UID: \"d668b0b6-441c-481e-b6ca-0fcf6fdd0300\") " Jan 26 11:04:32 crc kubenswrapper[5003]: I0126 11:04:32.800890 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-scripts\") pod \"d668b0b6-441c-481e-b6ca-0fcf6fdd0300\" (UID: \"d668b0b6-441c-481e-b6ca-0fcf6fdd0300\") " Jan 26 11:04:32 crc kubenswrapper[5003]: I0126 11:04:32.801009 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-npnmq\" (UniqueName: \"kubernetes.io/projected/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-kube-api-access-npnmq\") pod \"d668b0b6-441c-481e-b6ca-0fcf6fdd0300\" (UID: \"d668b0b6-441c-481e-b6ca-0fcf6fdd0300\") " Jan 26 11:04:32 crc kubenswrapper[5003]: I0126 11:04:32.801040 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-dispersionconf\") pod \"d668b0b6-441c-481e-b6ca-0fcf6fdd0300\" (UID: \"d668b0b6-441c-481e-b6ca-0fcf6fdd0300\") " Jan 26 11:04:32 crc kubenswrapper[5003]: I0126 11:04:32.801121 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-etc-swift\") pod \"d668b0b6-441c-481e-b6ca-0fcf6fdd0300\" (UID: \"d668b0b6-441c-481e-b6ca-0fcf6fdd0300\") " Jan 26 11:04:32 crc kubenswrapper[5003]: I0126 11:04:32.801168 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-swiftconf\") pod \"d668b0b6-441c-481e-b6ca-0fcf6fdd0300\" (UID: \"d668b0b6-441c-481e-b6ca-0fcf6fdd0300\") " Jan 26 11:04:32 crc kubenswrapper[5003]: I0126 11:04:32.801440 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "d668b0b6-441c-481e-b6ca-0fcf6fdd0300" (UID: "d668b0b6-441c-481e-b6ca-0fcf6fdd0300"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 11:04:32 crc kubenswrapper[5003]: I0126 11:04:32.802178 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "d668b0b6-441c-481e-b6ca-0fcf6fdd0300" (UID: "d668b0b6-441c-481e-b6ca-0fcf6fdd0300"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:04:32 crc kubenswrapper[5003]: I0126 11:04:32.808746 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-kube-api-access-npnmq" (OuterVolumeSpecName: "kube-api-access-npnmq") pod "d668b0b6-441c-481e-b6ca-0fcf6fdd0300" (UID: "d668b0b6-441c-481e-b6ca-0fcf6fdd0300"). InnerVolumeSpecName "kube-api-access-npnmq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:04:32 crc kubenswrapper[5003]: I0126 11:04:32.832155 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-scripts" (OuterVolumeSpecName: "scripts") pod "d668b0b6-441c-481e-b6ca-0fcf6fdd0300" (UID: "d668b0b6-441c-481e-b6ca-0fcf6fdd0300"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 11:04:32 crc kubenswrapper[5003]: I0126 11:04:32.834304 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "d668b0b6-441c-481e-b6ca-0fcf6fdd0300" (UID: "d668b0b6-441c-481e-b6ca-0fcf6fdd0300"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:04:32 crc kubenswrapper[5003]: I0126 11:04:32.839803 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "d668b0b6-441c-481e-b6ca-0fcf6fdd0300" (UID: "d668b0b6-441c-481e-b6ca-0fcf6fdd0300"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:04:32 crc kubenswrapper[5003]: I0126 11:04:32.902708 5003 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 26 11:04:32 crc kubenswrapper[5003]: I0126 11:04:32.902745 5003 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 26 11:04:32 crc kubenswrapper[5003]: I0126 11:04:32.902757 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-scripts\") on node \"crc\" DevicePath \"\"" Jan 26 11:04:32 crc kubenswrapper[5003]: I0126 11:04:32.902765 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-npnmq\" (UniqueName: \"kubernetes.io/projected/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-kube-api-access-npnmq\") on node \"crc\" DevicePath \"\"" Jan 26 11:04:32 crc kubenswrapper[5003]: I0126 11:04:32.902775 5003 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 26 11:04:32 crc kubenswrapper[5003]: I0126 11:04:32.902782 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d668b0b6-441c-481e-b6ca-0fcf6fdd0300-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 26 11:04:33 crc kubenswrapper[5003]: I0126 11:04:33.011347 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="d668b0b6-441c-481e-b6ca-0fcf6fdd0300" path="/var/lib/kubelet/pods/d668b0b6-441c-481e-b6ca-0fcf6fdd0300/volumes" Jan 26 11:04:33 crc kubenswrapper[5003]: I0126 11:04:33.109033 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-98s8w"] Jan 26 11:04:33 crc kubenswrapper[5003]: E0126 11:04:33.109359 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d668b0b6-441c-481e-b6ca-0fcf6fdd0300" containerName="swift-ring-rebalance" Jan 26 11:04:33 crc kubenswrapper[5003]: I0126 11:04:33.109373 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d668b0b6-441c-481e-b6ca-0fcf6fdd0300" containerName="swift-ring-rebalance" Jan 26 11:04:33 crc kubenswrapper[5003]: I0126 11:04:33.109520 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d668b0b6-441c-481e-b6ca-0fcf6fdd0300" containerName="swift-ring-rebalance" Jan 26 11:04:33 crc kubenswrapper[5003]: I0126 11:04:33.110050 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-98s8w" Jan 26 11:04:33 crc kubenswrapper[5003]: I0126 11:04:33.121330 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-98s8w"] Jan 26 11:04:33 crc kubenswrapper[5003]: I0126 11:04:33.206961 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c12cf46e-3781-41f1-b96f-973a197b3092-scripts\") pod \"swift-ring-rebalance-debug-98s8w\" (UID: \"c12cf46e-3781-41f1-b96f-973a197b3092\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-98s8w" Jan 26 11:04:33 crc kubenswrapper[5003]: I0126 11:04:33.207039 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/c12cf46e-3781-41f1-b96f-973a197b3092-dispersionconf\") pod \"swift-ring-rebalance-debug-98s8w\" (UID: \"c12cf46e-3781-41f1-b96f-973a197b3092\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-98s8w" Jan 26 11:04:33 crc kubenswrapper[5003]: I0126 11:04:33.207082 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/c12cf46e-3781-41f1-b96f-973a197b3092-ring-data-devices\") pod \"swift-ring-rebalance-debug-98s8w\" (UID: \"c12cf46e-3781-41f1-b96f-973a197b3092\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-98s8w" Jan 26 11:04:33 crc kubenswrapper[5003]: I0126 11:04:33.207393 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8w9d\" (UniqueName: \"kubernetes.io/projected/c12cf46e-3781-41f1-b96f-973a197b3092-kube-api-access-c8w9d\") pod \"swift-ring-rebalance-debug-98s8w\" (UID: \"c12cf46e-3781-41f1-b96f-973a197b3092\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-98s8w" Jan 26 11:04:33 crc kubenswrapper[5003]: I0126 11:04:33.207443 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/c12cf46e-3781-41f1-b96f-973a197b3092-etc-swift\") pod \"swift-ring-rebalance-debug-98s8w\" (UID: \"c12cf46e-3781-41f1-b96f-973a197b3092\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-98s8w" Jan 26 11:04:33 crc kubenswrapper[5003]: I0126 11:04:33.207491 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/c12cf46e-3781-41f1-b96f-973a197b3092-swiftconf\") pod \"swift-ring-rebalance-debug-98s8w\" (UID: \"c12cf46e-3781-41f1-b96f-973a197b3092\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-98s8w" Jan 26 11:04:33 crc kubenswrapper[5003]: I0126 11:04:33.308652 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/c12cf46e-3781-41f1-b96f-973a197b3092-dispersionconf\") pod \"swift-ring-rebalance-debug-98s8w\" (UID: \"c12cf46e-3781-41f1-b96f-973a197b3092\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-98s8w" Jan 26 11:04:33 crc kubenswrapper[5003]: I0126 11:04:33.308767 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/c12cf46e-3781-41f1-b96f-973a197b3092-ring-data-devices\") pod \"swift-ring-rebalance-debug-98s8w\" (UID: \"c12cf46e-3781-41f1-b96f-973a197b3092\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-98s8w" Jan 26 11:04:33 crc kubenswrapper[5003]: I0126 11:04:33.308838 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8w9d\" (UniqueName: \"kubernetes.io/projected/c12cf46e-3781-41f1-b96f-973a197b3092-kube-api-access-c8w9d\") pod \"swift-ring-rebalance-debug-98s8w\" (UID: \"c12cf46e-3781-41f1-b96f-973a197b3092\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-98s8w" Jan 26 11:04:33 crc kubenswrapper[5003]: I0126 11:04:33.308871 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/c12cf46e-3781-41f1-b96f-973a197b3092-etc-swift\") pod \"swift-ring-rebalance-debug-98s8w\" (UID: \"c12cf46e-3781-41f1-b96f-973a197b3092\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-98s8w" Jan 26 11:04:33 crc kubenswrapper[5003]: I0126 11:04:33.308915 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/c12cf46e-3781-41f1-b96f-973a197b3092-swiftconf\") pod \"swift-ring-rebalance-debug-98s8w\" (UID: \"c12cf46e-3781-41f1-b96f-973a197b3092\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-98s8w" Jan 26 11:04:33 crc kubenswrapper[5003]: I0126 11:04:33.308945 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c12cf46e-3781-41f1-b96f-973a197b3092-scripts\") pod \"swift-ring-rebalance-debug-98s8w\" (UID: \"c12cf46e-3781-41f1-b96f-973a197b3092\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-98s8w" Jan 26 11:04:33 crc kubenswrapper[5003]: I0126 11:04:33.309402 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/c12cf46e-3781-41f1-b96f-973a197b3092-etc-swift\") pod \"swift-ring-rebalance-debug-98s8w\" (UID: \"c12cf46e-3781-41f1-b96f-973a197b3092\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-98s8w" Jan 26 11:04:33 crc kubenswrapper[5003]: I0126 11:04:33.309939 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c12cf46e-3781-41f1-b96f-973a197b3092-scripts\") pod \"swift-ring-rebalance-debug-98s8w\" (UID: \"c12cf46e-3781-41f1-b96f-973a197b3092\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-98s8w" Jan 26 11:04:33 crc kubenswrapper[5003]: I0126 11:04:33.310061 5003 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/c12cf46e-3781-41f1-b96f-973a197b3092-ring-data-devices\") pod \"swift-ring-rebalance-debug-98s8w\" (UID: \"c12cf46e-3781-41f1-b96f-973a197b3092\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-98s8w" Jan 26 11:04:33 crc kubenswrapper[5003]: I0126 11:04:33.314635 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/c12cf46e-3781-41f1-b96f-973a197b3092-swiftconf\") pod \"swift-ring-rebalance-debug-98s8w\" (UID: \"c12cf46e-3781-41f1-b96f-973a197b3092\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-98s8w" Jan 26 11:04:33 crc kubenswrapper[5003]: I0126 11:04:33.317698 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/c12cf46e-3781-41f1-b96f-973a197b3092-dispersionconf\") pod \"swift-ring-rebalance-debug-98s8w\" (UID: \"c12cf46e-3781-41f1-b96f-973a197b3092\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-98s8w" Jan 26 11:04:33 crc kubenswrapper[5003]: I0126 11:04:33.329499 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8w9d\" (UniqueName: \"kubernetes.io/projected/c12cf46e-3781-41f1-b96f-973a197b3092-kube-api-access-c8w9d\") pod \"swift-ring-rebalance-debug-98s8w\" (UID: \"c12cf46e-3781-41f1-b96f-973a197b3092\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-98s8w" Jan 26 11:04:33 crc kubenswrapper[5003]: I0126 11:04:33.367652 5003 scope.go:117] "RemoveContainer" containerID="639652446ecfac50132457191c7713439a62795a8fe4b2ef83275373cb460471" Jan 26 11:04:33 crc kubenswrapper[5003]: I0126 11:04:33.367740 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-4rfk6" Jan 26 11:04:33 crc kubenswrapper[5003]: I0126 11:04:33.428703 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-98s8w" Jan 26 11:04:33 crc kubenswrapper[5003]: I0126 11:04:33.847008 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-98s8w"] Jan 26 11:04:33 crc kubenswrapper[5003]: W0126 11:04:33.852106 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc12cf46e_3781_41f1_b96f_973a197b3092.slice/crio-b5f8f7841723c5ae7997f6fdb982fa6db39f2a854a4b63101722b8d98da9ddcd WatchSource:0}: Error finding container b5f8f7841723c5ae7997f6fdb982fa6db39f2a854a4b63101722b8d98da9ddcd: Status 404 returned error can't find the container with id b5f8f7841723c5ae7997f6fdb982fa6db39f2a854a4b63101722b8d98da9ddcd Jan 26 11:04:34 crc kubenswrapper[5003]: I0126 11:04:34.377718 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-98s8w" event={"ID":"c12cf46e-3781-41f1-b96f-973a197b3092","Type":"ContainerStarted","Data":"44eb95174a9971d82c75beccef4e953696637bea5a347f6eaf3f257661d25582"} Jan 26 11:04:34 crc kubenswrapper[5003]: I0126 11:04:34.377855 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-98s8w" event={"ID":"c12cf46e-3781-41f1-b96f-973a197b3092","Type":"ContainerStarted","Data":"b5f8f7841723c5ae7997f6fdb982fa6db39f2a854a4b63101722b8d98da9ddcd"} Jan 26 11:04:34 crc kubenswrapper[5003]: I0126 11:04:34.395067 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-ring-rebalance-debug-98s8w" podStartSLOduration=1.395040987 podStartE2EDuration="1.395040987s" podCreationTimestamp="2026-01-26 11:04:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 11:04:34.392519795 +0000 UTC m=+1289.933745396" watchObservedRunningTime="2026-01-26 11:04:34.395040987 +0000 UTC m=+1289.936266548" Jan 26 11:04:36 crc kubenswrapper[5003]: I0126 11:04:36.397518 5003 generic.go:334] "Generic (PLEG): container finished" podID="c12cf46e-3781-41f1-b96f-973a197b3092" containerID="44eb95174a9971d82c75beccef4e953696637bea5a347f6eaf3f257661d25582" exitCode=0 Jan 26 11:04:36 crc kubenswrapper[5003]: I0126 11:04:36.397604 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-98s8w" event={"ID":"c12cf46e-3781-41f1-b96f-973a197b3092","Type":"ContainerDied","Data":"44eb95174a9971d82c75beccef4e953696637bea5a347f6eaf3f257661d25582"} Jan 26 11:04:37 crc kubenswrapper[5003]: I0126 11:04:37.705987 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-98s8w" Jan 26 11:04:37 crc kubenswrapper[5003]: I0126 11:04:37.762175 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-98s8w"] Jan 26 11:04:37 crc kubenswrapper[5003]: I0126 11:04:37.768248 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-98s8w"] Jan 26 11:04:37 crc kubenswrapper[5003]: I0126 11:04:37.880859 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c8w9d\" (UniqueName: \"kubernetes.io/projected/c12cf46e-3781-41f1-b96f-973a197b3092-kube-api-access-c8w9d\") pod \"c12cf46e-3781-41f1-b96f-973a197b3092\" (UID: \"c12cf46e-3781-41f1-b96f-973a197b3092\") " Jan 26 11:04:37 crc kubenswrapper[5003]: I0126 11:04:37.881058 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c12cf46e-3781-41f1-b96f-973a197b3092-scripts\") pod \"c12cf46e-3781-41f1-b96f-973a197b3092\" (UID: \"c12cf46e-3781-41f1-b96f-973a197b3092\") " Jan 26 11:04:37 crc kubenswrapper[5003]: I0126 11:04:37.882049 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/c12cf46e-3781-41f1-b96f-973a197b3092-dispersionconf\") pod \"c12cf46e-3781-41f1-b96f-973a197b3092\" (UID: \"c12cf46e-3781-41f1-b96f-973a197b3092\") " Jan 26 11:04:37 crc kubenswrapper[5003]: I0126 11:04:37.882135 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/c12cf46e-3781-41f1-b96f-973a197b3092-etc-swift\") pod \"c12cf46e-3781-41f1-b96f-973a197b3092\" (UID: \"c12cf46e-3781-41f1-b96f-973a197b3092\") " Jan 26 11:04:37 crc kubenswrapper[5003]: I0126 11:04:37.882182 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/c12cf46e-3781-41f1-b96f-973a197b3092-ring-data-devices\") pod \"c12cf46e-3781-41f1-b96f-973a197b3092\" (UID: \"c12cf46e-3781-41f1-b96f-973a197b3092\") " Jan 26 11:04:37 crc kubenswrapper[5003]: I0126 11:04:37.882218 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/c12cf46e-3781-41f1-b96f-973a197b3092-swiftconf\") pod \"c12cf46e-3781-41f1-b96f-973a197b3092\" (UID: \"c12cf46e-3781-41f1-b96f-973a197b3092\") " Jan 26 11:04:37 crc kubenswrapper[5003]: I0126 11:04:37.882739 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c12cf46e-3781-41f1-b96f-973a197b3092-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "c12cf46e-3781-41f1-b96f-973a197b3092" (UID: "c12cf46e-3781-41f1-b96f-973a197b3092"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 11:04:37 crc kubenswrapper[5003]: I0126 11:04:37.883021 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c12cf46e-3781-41f1-b96f-973a197b3092-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "c12cf46e-3781-41f1-b96f-973a197b3092" (UID: "c12cf46e-3781-41f1-b96f-973a197b3092"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 11:04:37 crc kubenswrapper[5003]: I0126 11:04:37.887418 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c12cf46e-3781-41f1-b96f-973a197b3092-kube-api-access-c8w9d" (OuterVolumeSpecName: "kube-api-access-c8w9d") pod "c12cf46e-3781-41f1-b96f-973a197b3092" (UID: "c12cf46e-3781-41f1-b96f-973a197b3092"). InnerVolumeSpecName "kube-api-access-c8w9d". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 11:04:37 crc kubenswrapper[5003]: I0126 11:04:37.904298 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c12cf46e-3781-41f1-b96f-973a197b3092-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "c12cf46e-3781-41f1-b96f-973a197b3092" (UID: "c12cf46e-3781-41f1-b96f-973a197b3092"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 11:04:37 crc kubenswrapper[5003]: I0126 11:04:37.906088 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c12cf46e-3781-41f1-b96f-973a197b3092-scripts" (OuterVolumeSpecName: "scripts") pod "c12cf46e-3781-41f1-b96f-973a197b3092" (UID: "c12cf46e-3781-41f1-b96f-973a197b3092"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 11:04:37 crc kubenswrapper[5003]: I0126 11:04:37.908659 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c12cf46e-3781-41f1-b96f-973a197b3092-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "c12cf46e-3781-41f1-b96f-973a197b3092" (UID: "c12cf46e-3781-41f1-b96f-973a197b3092"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 11:04:37 crc kubenswrapper[5003]: I0126 11:04:37.983476 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c8w9d\" (UniqueName: \"kubernetes.io/projected/c12cf46e-3781-41f1-b96f-973a197b3092-kube-api-access-c8w9d\") on node \"crc\" DevicePath \"\""
Jan 26 11:04:37 crc kubenswrapper[5003]: I0126 11:04:37.983863 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c12cf46e-3781-41f1-b96f-973a197b3092-scripts\") on node \"crc\" DevicePath \"\""
Jan 26 11:04:37 crc kubenswrapper[5003]: I0126 11:04:37.983876 5003 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/c12cf46e-3781-41f1-b96f-973a197b3092-dispersionconf\") on node \"crc\" DevicePath \"\""
Jan 26 11:04:37 crc kubenswrapper[5003]: I0126 11:04:37.983888 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/c12cf46e-3781-41f1-b96f-973a197b3092-etc-swift\") on node \"crc\" DevicePath \"\""
Jan 26 11:04:37 crc kubenswrapper[5003]: I0126 11:04:37.983899 5003 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/c12cf46e-3781-41f1-b96f-973a197b3092-ring-data-devices\") on node \"crc\" DevicePath \"\""
Jan 26 11:04:37 crc kubenswrapper[5003]: I0126 11:04:37.983908 5003 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/c12cf46e-3781-41f1-b96f-973a197b3092-swiftconf\") on node \"crc\" DevicePath \"\""
Jan 26 11:04:38 crc kubenswrapper[5003]: I0126 11:04:38.416224 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b5f8f7841723c5ae7997f6fdb982fa6db39f2a854a4b63101722b8d98da9ddcd"
Jan 26 11:04:38 crc kubenswrapper[5003]: I0126 11:04:38.416312 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-98s8w"
Jan 26 11:04:38 crc kubenswrapper[5003]: I0126 11:04:38.918482 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-hsz8g"]
Jan 26 11:04:38 crc kubenswrapper[5003]: E0126 11:04:38.918869 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c12cf46e-3781-41f1-b96f-973a197b3092" containerName="swift-ring-rebalance"
Jan 26 11:04:38 crc kubenswrapper[5003]: I0126 11:04:38.918886 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="c12cf46e-3781-41f1-b96f-973a197b3092" containerName="swift-ring-rebalance"
Jan 26 11:04:38 crc kubenswrapper[5003]: I0126 11:04:38.919117 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="c12cf46e-3781-41f1-b96f-973a197b3092" containerName="swift-ring-rebalance"
Jan 26 11:04:38 crc kubenswrapper[5003]: I0126 11:04:38.919797 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-hsz8g"
Jan 26 11:04:38 crc kubenswrapper[5003]: I0126 11:04:38.922522 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-config-data"
Jan 26 11:04:38 crc kubenswrapper[5003]: I0126 11:04:38.923808 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-scripts"
Jan 26 11:04:38 crc kubenswrapper[5003]: I0126 11:04:38.929937 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-hsz8g"]
Jan 26 11:04:39 crc kubenswrapper[5003]: I0126 11:04:39.021249 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c12cf46e-3781-41f1-b96f-973a197b3092" path="/var/lib/kubelet/pods/c12cf46e-3781-41f1-b96f-973a197b3092/volumes"
Jan 26 11:04:39 crc kubenswrapper[5003]: I0126 11:04:39.040623 5003 patch_prober.go:28] interesting pod/machine-config-daemon-m84kp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 26 11:04:39 crc kubenswrapper[5003]: I0126 11:04:39.040706 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 26 11:04:39 crc kubenswrapper[5003]: I0126 11:04:39.100110 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgh9d\" (UniqueName: \"kubernetes.io/projected/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-kube-api-access-sgh9d\") pod \"swift-ring-rebalance-debug-hsz8g\" (UID: \"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hsz8g"
Jan 26 11:04:39 crc kubenswrapper[5003]: I0126 11:04:39.100175 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-swiftconf\") pod \"swift-ring-rebalance-debug-hsz8g\" (UID: \"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hsz8g"
Jan 26 11:04:39 crc kubenswrapper[5003]: I0126 11:04:39.100198 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-scripts\") pod \"swift-ring-rebalance-debug-hsz8g\" (UID: \"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hsz8g"
Jan 26 11:04:39 crc kubenswrapper[5003]: I0126 11:04:39.100704 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-dispersionconf\") pod \"swift-ring-rebalance-debug-hsz8g\" (UID: \"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hsz8g"
Jan 26 11:04:39 crc kubenswrapper[5003]: I0126 11:04:39.100911 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-etc-swift\") pod \"swift-ring-rebalance-debug-hsz8g\" (UID: \"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hsz8g"
Jan 26 11:04:39 crc kubenswrapper[5003]: I0126 11:04:39.100955 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-ring-data-devices\") pod \"swift-ring-rebalance-debug-hsz8g\" (UID: \"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hsz8g"
Jan 26 11:04:39 crc kubenswrapper[5003]: I0126 11:04:39.202639 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-etc-swift\") pod \"swift-ring-rebalance-debug-hsz8g\" (UID: \"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hsz8g"
Jan 26 11:04:39 crc kubenswrapper[5003]: I0126 11:04:39.202738 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-ring-data-devices\") pod \"swift-ring-rebalance-debug-hsz8g\" (UID: \"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hsz8g"
Jan 26 11:04:39 crc kubenswrapper[5003]: I0126 11:04:39.202823 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgh9d\" (UniqueName: \"kubernetes.io/projected/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-kube-api-access-sgh9d\") pod \"swift-ring-rebalance-debug-hsz8g\" (UID: \"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hsz8g"
Jan 26 11:04:39 crc kubenswrapper[5003]: I0126 11:04:39.202855 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-swiftconf\") pod \"swift-ring-rebalance-debug-hsz8g\" (UID: \"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hsz8g"
Jan 26 11:04:39 crc kubenswrapper[5003]: I0126 11:04:39.202899 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-scripts\") pod \"swift-ring-rebalance-debug-hsz8g\" (UID: \"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hsz8g"
Jan 26 11:04:39 crc kubenswrapper[5003]: I0126 11:04:39.202968 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-dispersionconf\") pod \"swift-ring-rebalance-debug-hsz8g\" (UID: \"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hsz8g"
Jan 26 11:04:39 crc kubenswrapper[5003]: I0126 11:04:39.203155 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-etc-swift\") pod \"swift-ring-rebalance-debug-hsz8g\" (UID: \"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hsz8g"
Jan 26 11:04:39 crc kubenswrapper[5003]: I0126 11:04:39.203640 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-ring-data-devices\") pod \"swift-ring-rebalance-debug-hsz8g\" (UID: \"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hsz8g"
Jan 26 11:04:39 crc kubenswrapper[5003]: I0126 11:04:39.203926 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-scripts\") pod \"swift-ring-rebalance-debug-hsz8g\" (UID: \"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hsz8g"
Jan 26 11:04:39 crc kubenswrapper[5003]: I0126 11:04:39.207419 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-dispersionconf\") pod \"swift-ring-rebalance-debug-hsz8g\" (UID: \"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hsz8g"
Jan 26 11:04:39 crc kubenswrapper[5003]: I0126 11:04:39.209777 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-swiftconf\") pod \"swift-ring-rebalance-debug-hsz8g\" (UID: \"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hsz8g"
Jan 26 11:04:39 crc kubenswrapper[5003]: I0126 11:04:39.220053 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgh9d\" (UniqueName: \"kubernetes.io/projected/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-kube-api-access-sgh9d\") pod \"swift-ring-rebalance-debug-hsz8g\" (UID: \"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hsz8g"
Jan 26 11:04:39 crc kubenswrapper[5003]: I0126 11:04:39.239949 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-hsz8g"
Jan 26 11:04:39 crc kubenswrapper[5003]: I0126 11:04:39.667269 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-hsz8g"]
Jan 26 11:04:39 crc kubenswrapper[5003]: W0126 11:04:39.669032 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podee8e44f5_8ea1_4a95_9bdf_7d56c6a38db4.slice/crio-7da07857547d7c9e4fc66e218335afc7068c94ba1b9df34e627d911dac11be7e WatchSource:0}: Error finding container 7da07857547d7c9e4fc66e218335afc7068c94ba1b9df34e627d911dac11be7e: Status 404 returned error can't find the container with id 7da07857547d7c9e4fc66e218335afc7068c94ba1b9df34e627d911dac11be7e
Jan 26 11:04:40 crc kubenswrapper[5003]: I0126 11:04:40.435689 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-hsz8g" event={"ID":"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4","Type":"ContainerStarted","Data":"a3e9bb68fd5364ac306256f0901236db32ecc139a99006907d1a35747931de7d"}
Jan 26 11:04:40 crc kubenswrapper[5003]: I0126 11:04:40.436309 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-hsz8g" event={"ID":"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4","Type":"ContainerStarted","Data":"7da07857547d7c9e4fc66e218335afc7068c94ba1b9df34e627d911dac11be7e"}
Jan 26 11:04:40 crc kubenswrapper[5003]: I0126 11:04:40.458962 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-ring-rebalance-debug-hsz8g" podStartSLOduration=2.458942477 podStartE2EDuration="2.458942477s" podCreationTimestamp="2026-01-26 11:04:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 11:04:40.450097133 +0000 UTC m=+1295.991322684" watchObservedRunningTime="2026-01-26 11:04:40.458942477 +0000 UTC m=+1296.000168038"
Jan 26 11:04:42 crc kubenswrapper[5003]: I0126 11:04:42.455270 5003 generic.go:334] "Generic (PLEG): container finished" podID="ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4" containerID="a3e9bb68fd5364ac306256f0901236db32ecc139a99006907d1a35747931de7d" exitCode=0
Jan 26 11:04:42 crc kubenswrapper[5003]: I0126 11:04:42.455315 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-hsz8g" event={"ID":"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4","Type":"ContainerDied","Data":"a3e9bb68fd5364ac306256f0901236db32ecc139a99006907d1a35747931de7d"}
Jan 26 11:04:43 crc kubenswrapper[5003]: I0126 11:04:43.780820 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-hsz8g"
Jan 26 11:04:43 crc kubenswrapper[5003]: I0126 11:04:43.825651 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-hsz8g"]
Jan 26 11:04:43 crc kubenswrapper[5003]: I0126 11:04:43.833929 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-hsz8g"]
Jan 26 11:04:43 crc kubenswrapper[5003]: I0126 11:04:43.972244 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sgh9d\" (UniqueName: \"kubernetes.io/projected/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-kube-api-access-sgh9d\") pod \"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4\" (UID: \"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4\") "
Jan 26 11:04:43 crc kubenswrapper[5003]: I0126 11:04:43.972336 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-swiftconf\") pod \"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4\" (UID: \"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4\") "
Jan 26 11:04:43 crc kubenswrapper[5003]: I0126 11:04:43.972380 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-dispersionconf\") pod \"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4\" (UID: \"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4\") "
Jan 26 11:04:43 crc kubenswrapper[5003]: I0126 11:04:43.972416 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-etc-swift\") pod \"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4\" (UID: \"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4\") "
Jan 26 11:04:43 crc kubenswrapper[5003]: I0126 11:04:43.972445 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-ring-data-devices\") pod \"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4\" (UID: \"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4\") "
Jan 26 11:04:43 crc kubenswrapper[5003]: I0126 11:04:43.972592 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-scripts\") pod \"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4\" (UID: \"ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4\") "
Jan 26 11:04:43 crc kubenswrapper[5003]: I0126 11:04:43.973550 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4" (UID: "ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 11:04:43 crc kubenswrapper[5003]: I0126 11:04:43.973623 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4" (UID: "ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 11:04:43 crc kubenswrapper[5003]: I0126 11:04:43.980199 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-kube-api-access-sgh9d" (OuterVolumeSpecName: "kube-api-access-sgh9d") pod "ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4" (UID: "ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4"). InnerVolumeSpecName "kube-api-access-sgh9d". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 11:04:43 crc kubenswrapper[5003]: I0126 11:04:43.998079 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-scripts" (OuterVolumeSpecName: "scripts") pod "ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4" (UID: "ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 11:04:43 crc kubenswrapper[5003]: I0126 11:04:43.998361 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4" (UID: "ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 11:04:43 crc kubenswrapper[5003]: I0126 11:04:43.999865 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4" (UID: "ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 11:04:44 crc kubenswrapper[5003]: I0126 11:04:44.074713 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-scripts\") on node \"crc\" DevicePath \"\""
Jan 26 11:04:44 crc kubenswrapper[5003]: I0126 11:04:44.075260 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sgh9d\" (UniqueName: \"kubernetes.io/projected/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-kube-api-access-sgh9d\") on node \"crc\" DevicePath \"\""
Jan 26 11:04:44 crc kubenswrapper[5003]: I0126 11:04:44.075312 5003 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-swiftconf\") on node \"crc\" DevicePath \"\""
Jan 26 11:04:44 crc kubenswrapper[5003]: I0126 11:04:44.075327 5003 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-dispersionconf\") on node \"crc\" DevicePath \"\""
Jan 26 11:04:44 crc kubenswrapper[5003]: I0126 11:04:44.075340 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-etc-swift\") on node \"crc\" DevicePath \"\""
Jan 26 11:04:44 crc kubenswrapper[5003]: I0126 11:04:44.075353 5003 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4-ring-data-devices\") on node \"crc\" DevicePath \"\""
Jan 26 11:04:44 crc kubenswrapper[5003]: I0126 11:04:44.474098 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7da07857547d7c9e4fc66e218335afc7068c94ba1b9df34e627d911dac11be7e"
Jan 26 11:04:44 crc kubenswrapper[5003]: I0126 11:04:44.474147 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-hsz8g"
Jan 26 11:04:44 crc kubenswrapper[5003]: I0126 11:04:44.986098 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-jl4gl"]
Jan 26 11:04:44 crc kubenswrapper[5003]: E0126 11:04:44.986538 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4" containerName="swift-ring-rebalance"
Jan 26 11:04:44 crc kubenswrapper[5003]: I0126 11:04:44.986556 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4" containerName="swift-ring-rebalance"
Jan 26 11:04:44 crc kubenswrapper[5003]: I0126 11:04:44.988429 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4" containerName="swift-ring-rebalance"
Jan 26 11:04:44 crc kubenswrapper[5003]: I0126 11:04:44.989269 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-jl4gl"
Jan 26 11:04:44 crc kubenswrapper[5003]: I0126 11:04:44.991593 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-scripts"
Jan 26 11:04:44 crc kubenswrapper[5003]: I0126 11:04:44.991843 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-config-data"
Jan 26 11:04:44 crc kubenswrapper[5003]: I0126 11:04:44.995263 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-jl4gl"]
Jan 26 11:04:45 crc kubenswrapper[5003]: I0126 11:04:45.019303 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4" path="/var/lib/kubelet/pods/ee8e44f5-8ea1-4a95-9bdf-7d56c6a38db4/volumes"
Jan 26 11:04:45 crc kubenswrapper[5003]: I0126 11:04:45.090147 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2d303cb3-63f2-4130-a35f-000b31b5d414-dispersionconf\") pod \"swift-ring-rebalance-debug-jl4gl\" (UID: \"2d303cb3-63f2-4130-a35f-000b31b5d414\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jl4gl"
Jan 26 11:04:45 crc kubenswrapper[5003]: I0126 11:04:45.090360 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2d303cb3-63f2-4130-a35f-000b31b5d414-scripts\") pod \"swift-ring-rebalance-debug-jl4gl\" (UID: \"2d303cb3-63f2-4130-a35f-000b31b5d414\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jl4gl"
Jan 26 11:04:45 crc kubenswrapper[5003]: I0126 11:04:45.090570 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2d303cb3-63f2-4130-a35f-000b31b5d414-swiftconf\") pod \"swift-ring-rebalance-debug-jl4gl\" (UID: \"2d303cb3-63f2-4130-a35f-000b31b5d414\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jl4gl"
Jan 26 11:04:45 crc kubenswrapper[5003]: I0126 11:04:45.090621 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2d303cb3-63f2-4130-a35f-000b31b5d414-etc-swift\") pod \"swift-ring-rebalance-debug-jl4gl\" (UID: \"2d303cb3-63f2-4130-a35f-000b31b5d414\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jl4gl"
Jan 26 11:04:45 crc kubenswrapper[5003]: I0126 11:04:45.090689 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5d6z\" (UniqueName: \"kubernetes.io/projected/2d303cb3-63f2-4130-a35f-000b31b5d414-kube-api-access-c5d6z\") pod \"swift-ring-rebalance-debug-jl4gl\" (UID: \"2d303cb3-63f2-4130-a35f-000b31b5d414\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jl4gl"
Jan 26 11:04:45 crc kubenswrapper[5003]: I0126 11:04:45.090779 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2d303cb3-63f2-4130-a35f-000b31b5d414-ring-data-devices\") pod \"swift-ring-rebalance-debug-jl4gl\" (UID: \"2d303cb3-63f2-4130-a35f-000b31b5d414\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jl4gl"
Jan 26 11:04:45 crc kubenswrapper[5003]: I0126 11:04:45.191496 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2d303cb3-63f2-4130-a35f-000b31b5d414-scripts\") pod \"swift-ring-rebalance-debug-jl4gl\" (UID: \"2d303cb3-63f2-4130-a35f-000b31b5d414\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jl4gl"
Jan 26 11:04:45 crc kubenswrapper[5003]: I0126 11:04:45.191608 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2d303cb3-63f2-4130-a35f-000b31b5d414-swiftconf\") pod \"swift-ring-rebalance-debug-jl4gl\" (UID: \"2d303cb3-63f2-4130-a35f-000b31b5d414\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jl4gl"
Jan 26 11:04:45 crc kubenswrapper[5003]: I0126 11:04:45.191649 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2d303cb3-63f2-4130-a35f-000b31b5d414-etc-swift\") pod \"swift-ring-rebalance-debug-jl4gl\" (UID: \"2d303cb3-63f2-4130-a35f-000b31b5d414\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jl4gl"
Jan 26 11:04:45 crc kubenswrapper[5003]: I0126 11:04:45.191680 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5d6z\" (UniqueName: \"kubernetes.io/projected/2d303cb3-63f2-4130-a35f-000b31b5d414-kube-api-access-c5d6z\") pod \"swift-ring-rebalance-debug-jl4gl\" (UID: \"2d303cb3-63f2-4130-a35f-000b31b5d414\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jl4gl"
Jan 26 11:04:45 crc kubenswrapper[5003]: I0126 11:04:45.191719 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2d303cb3-63f2-4130-a35f-000b31b5d414-ring-data-devices\") pod \"swift-ring-rebalance-debug-jl4gl\" (UID: \"2d303cb3-63f2-4130-a35f-000b31b5d414\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jl4gl"
Jan 26 11:04:45 crc kubenswrapper[5003]: I0126 11:04:45.191759 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2d303cb3-63f2-4130-a35f-000b31b5d414-dispersionconf\") pod \"swift-ring-rebalance-debug-jl4gl\" (UID: \"2d303cb3-63f2-4130-a35f-000b31b5d414\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jl4gl"
Jan 26 11:04:45 crc kubenswrapper[5003]: I0126 11:04:45.192614 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2d303cb3-63f2-4130-a35f-000b31b5d414-etc-swift\") pod \"swift-ring-rebalance-debug-jl4gl\" (UID: \"2d303cb3-63f2-4130-a35f-000b31b5d414\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jl4gl"
Jan 26 11:04:45 crc kubenswrapper[5003]: I0126 11:04:45.192647 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2d303cb3-63f2-4130-a35f-000b31b5d414-ring-data-devices\") pod \"swift-ring-rebalance-debug-jl4gl\" (UID: \"2d303cb3-63f2-4130-a35f-000b31b5d414\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jl4gl"
Jan 26 11:04:45 crc kubenswrapper[5003]: I0126 11:04:45.192719 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2d303cb3-63f2-4130-a35f-000b31b5d414-scripts\") pod \"swift-ring-rebalance-debug-jl4gl\" (UID: \"2d303cb3-63f2-4130-a35f-000b31b5d414\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jl4gl"
Jan 26 11:04:45 crc kubenswrapper[5003]: I0126 11:04:45.198136 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2d303cb3-63f2-4130-a35f-000b31b5d414-swiftconf\") pod \"swift-ring-rebalance-debug-jl4gl\" (UID: \"2d303cb3-63f2-4130-a35f-000b31b5d414\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jl4gl"
Jan 26 11:04:45 crc kubenswrapper[5003]: I0126 11:04:45.199861 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2d303cb3-63f2-4130-a35f-000b31b5d414-dispersionconf\") pod \"swift-ring-rebalance-debug-jl4gl\" (UID: \"2d303cb3-63f2-4130-a35f-000b31b5d414\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jl4gl"
Jan 26 11:04:45 crc kubenswrapper[5003]: I0126 11:04:45.216171 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5d6z\" (UniqueName: \"kubernetes.io/projected/2d303cb3-63f2-4130-a35f-000b31b5d414-kube-api-access-c5d6z\") pod \"swift-ring-rebalance-debug-jl4gl\" (UID: \"2d303cb3-63f2-4130-a35f-000b31b5d414\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jl4gl"
Jan 26 11:04:45 crc kubenswrapper[5003]: I0126 11:04:45.321435 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-jl4gl"
Jan 26 11:04:45 crc kubenswrapper[5003]: I0126 11:04:45.768235 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-jl4gl"]
Jan 26 11:04:46 crc kubenswrapper[5003]: I0126 11:04:46.503344 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-jl4gl" event={"ID":"2d303cb3-63f2-4130-a35f-000b31b5d414","Type":"ContainerStarted","Data":"def8593f5dc4e5b190a67ba07275990c971b8f57a48205aeed28d7799359a1cd"}
Jan 26 11:04:46 crc kubenswrapper[5003]: I0126 11:04:46.503491 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-jl4gl" event={"ID":"2d303cb3-63f2-4130-a35f-000b31b5d414","Type":"ContainerStarted","Data":"64e320a71de59d892c7593d0ef6f674d6c9a340d90d9f54a030ff63e9389505a"}
Jan 26 11:04:46 crc kubenswrapper[5003]: I0126 11:04:46.531051 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-ring-rebalance-debug-jl4gl" podStartSLOduration=2.531021552 podStartE2EDuration="2.531021552s" podCreationTimestamp="2026-01-26 11:04:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 11:04:46.530239939 +0000 UTC m=+1302.071465510" watchObservedRunningTime="2026-01-26 11:04:46.531021552 +0000 UTC m=+1302.072247113"
Jan 26 11:04:48 crc kubenswrapper[5003]: I0126 11:04:48.521116 5003 generic.go:334] "Generic (PLEG): container finished" podID="2d303cb3-63f2-4130-a35f-000b31b5d414" containerID="def8593f5dc4e5b190a67ba07275990c971b8f57a48205aeed28d7799359a1cd" exitCode=0
Jan 26 11:04:48 crc kubenswrapper[5003]: I0126 11:04:48.521188 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-jl4gl" event={"ID":"2d303cb3-63f2-4130-a35f-000b31b5d414","Type":"ContainerDied","Data":"def8593f5dc4e5b190a67ba07275990c971b8f57a48205aeed28d7799359a1cd"}
Jan 26 11:04:49 crc kubenswrapper[5003]: I0126 11:04:49.831805 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-jl4gl"
Jan 26 11:04:49 crc kubenswrapper[5003]: I0126 11:04:49.871580 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-jl4gl"]
Jan 26 11:04:49 crc kubenswrapper[5003]: I0126 11:04:49.879123 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-jl4gl"]
Jan 26 11:04:49 crc kubenswrapper[5003]: I0126 11:04:49.965204 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2d303cb3-63f2-4130-a35f-000b31b5d414-swiftconf\") pod \"2d303cb3-63f2-4130-a35f-000b31b5d414\" (UID: \"2d303cb3-63f2-4130-a35f-000b31b5d414\") "
Jan 26 11:04:49 crc kubenswrapper[5003]: I0126 11:04:49.965322 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2d303cb3-63f2-4130-a35f-000b31b5d414-etc-swift\") pod \"2d303cb3-63f2-4130-a35f-000b31b5d414\" (UID: \"2d303cb3-63f2-4130-a35f-000b31b5d414\") "
Jan 26 11:04:49 crc kubenswrapper[5003]: I0126 11:04:49.965362 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2d303cb3-63f2-4130-a35f-000b31b5d414-scripts\") pod \"2d303cb3-63f2-4130-a35f-000b31b5d414\" (UID: \"2d303cb3-63f2-4130-a35f-000b31b5d414\") "
Jan 26 11:04:49 crc kubenswrapper[5003]: I0126 11:04:49.965398 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2d303cb3-63f2-4130-a35f-000b31b5d414-dispersionconf\") pod \"2d303cb3-63f2-4130-a35f-000b31b5d414\" (UID: \"2d303cb3-63f2-4130-a35f-000b31b5d414\") "
Jan 26 11:04:49 crc kubenswrapper[5003]: I0126 11:04:49.965474 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2d303cb3-63f2-4130-a35f-000b31b5d414-ring-data-devices\") pod \"2d303cb3-63f2-4130-a35f-000b31b5d414\" (UID: \"2d303cb3-63f2-4130-a35f-000b31b5d414\") "
Jan 26 11:04:49 crc kubenswrapper[5003]: I0126 11:04:49.965519 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5d6z\" (UniqueName: \"kubernetes.io/projected/2d303cb3-63f2-4130-a35f-000b31b5d414-kube-api-access-c5d6z\") pod \"2d303cb3-63f2-4130-a35f-000b31b5d414\" (UID: \"2d303cb3-63f2-4130-a35f-000b31b5d414\") "
Jan 26 11:04:49 crc kubenswrapper[5003]: I0126 11:04:49.967323 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d303cb3-63f2-4130-a35f-000b31b5d414-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "2d303cb3-63f2-4130-a35f-000b31b5d414" (UID: "2d303cb3-63f2-4130-a35f-000b31b5d414"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 11:04:49 crc kubenswrapper[5003]: I0126 11:04:49.967635 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d303cb3-63f2-4130-a35f-000b31b5d414-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "2d303cb3-63f2-4130-a35f-000b31b5d414" (UID: "2d303cb3-63f2-4130-a35f-000b31b5d414"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 11:04:49 crc kubenswrapper[5003]: I0126 11:04:49.984804 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d303cb3-63f2-4130-a35f-000b31b5d414-kube-api-access-c5d6z" (OuterVolumeSpecName: "kube-api-access-c5d6z") pod "2d303cb3-63f2-4130-a35f-000b31b5d414" (UID: "2d303cb3-63f2-4130-a35f-000b31b5d414"). InnerVolumeSpecName "kube-api-access-c5d6z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.017114 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-0"]
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.017740 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="account-server" containerID="cri-o://cdbd7ccfbd2dda5c8e914cfc64d48a202c645b60787abb3380799caf918f44aa" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.018137 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="swift-recon-cron" containerID="cri-o://b1a0e343cad805ae8c6408da7e1ec9232470d5e94d9a936e6615f7c3187c00c6" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.018196 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="rsync" containerID="cri-o://18e58aa23f76d39065cf7ec27c37a7736d755ff1746ffe71aa5eb4d936e040d8" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.018235 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="object-expirer" containerID="cri-o://07bd1aaf9672dd7b6a3a855467320342869a84bbfd503485f147fb074f7b62da" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.018268 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="object-updater" containerID="cri-o://bed31d1f568e801654716e146fb9ddb44ea521879275a28f8dde39238b54ba0a" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.018328 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="object-auditor" containerID="cri-o://c44a317d8182394990b6b241828022a2a0a8d9496dc2292f96a1cc72b55ade51" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.018360 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="object-replicator" containerID="cri-o://178bd810c5348d96a9efd14bc38890c354eeb36dd59dc06f183129fe85ea1d5f" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.018390 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="object-server" containerID="cri-o://aa3f16901f9fac2eeb4f0e98481575cc714ec4e56ed459a337209d6cae3ac9bd" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.018837 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="account-reaper" containerID="cri-o://8223fc509969f6776b993e8185f9b069d5c9f086609a260402a84d4770684a11" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.019011 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="container-auditor" containerID="cri-o://dcf0b5ce28344896d5ad2a9cb5419af7078cf89fa9ce1120a5322d2a61e9b335" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.019067 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="container-replicator" containerID="cri-o://e240db621933c2cd325c34006ef2c68ab817226ffc8733b8fcedc9e3f09f14fd" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.019111 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="container-server" containerID="cri-o://2fc3c87be59a5ec7ba8a74b787b40113f9389d112d941fb7c2ed672c43b2edfd" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.019163 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="account-replicator" containerID="cri-o://cd3ec3973c0fdea5b314fe57cccfee96424716c43decd2b932dcb5a277f50475" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.019206 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="account-auditor" containerID="cri-o://4ad9c01357ab348dfff940847ee95e8e56b30bd4d45d9f4e4af91de556a91141" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.026128 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="container-updater" containerID="cri-o://4c70dc4068dca5aa538646b5a5cf67b9678e3c5da2f2b07c409ac6f6c846c7e7" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.034878 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d303cb3-63f2-4130-a35f-000b31b5d414-scripts" (OuterVolumeSpecName: "scripts") pod "2d303cb3-63f2-4130-a35f-000b31b5d414" (UID: "2d303cb3-63f2-4130-a35f-000b31b5d414"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.042733 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-2"]
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.043264 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="account-server" containerID="cri-o://fa643b20c3314cb09c04e05aa736116433ac48b341a87f5e0541497454d90ae1" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.043459 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="container-updater" containerID="cri-o://73e8ebe063e7176c37d9fc88e20295c086ec115cb05b91434693bd33f1f49da2" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.043496 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="container-auditor" containerID="cri-o://8640f61b9595e8cc65355db36b00c634c8f37f6bedb38c5576effa879d69cf8f" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.043529 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="container-replicator" containerID="cri-o://8d6c329a9bc40fbf978e8c86849f93750f8ea4257e5419fc4efd17dfa1c42ba4" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.043493 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="object-server" containerID="cri-o://ce19ee917cb68ccf5d427cd056c695990454baa0d29ce04a1105dbd984274d51" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.043560 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="container-server" containerID="cri-o://9a891062f08c6042aebdb2b82937f07a228467b3ed7709963fb06d4fa378eef3" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.043591 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="account-reaper" containerID="cri-o://76c91568fed3d3be7b0d221f08c5227712a61135c9639cbdc2b07a9d4d24752e" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.043639 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="account-auditor" containerID="cri-o://4384daada70a6c99d7a2ffb160ce6ab354e3942cd7ba8daebd42308333183a53" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.043676 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="account-replicator" containerID="cri-o://5e93c0b13111352b01ab24a7699b92113b6bf3ebf4cf55453135ded7ea36f320" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.043808 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="object-expirer" containerID="cri-o://f12fcca496d3abd1c909b9cf17dd411cfdd166e83214f3c1c87091761b52a212" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.043851 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="swift-recon-cron" containerID="cri-o://6875290abeab157e08486b92663f5ae51d75ce9c4d1edff0a97352af62aa9258" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.043884 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="rsync" containerID="cri-o://7b458a16e007ce6f0cee91e56dbf993488bc1c68a09fe8963fde8a000508741b" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.043912 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="object-updater" containerID="cri-o://162e1b56b56be0a20546eddac9ef658defb1efdb1e957f4ec5c5844d879ac13b" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.043970 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="object-auditor" containerID="cri-o://f8814d625f7117c51d0aec533953e4fa6b9b8b683beab027deb63801d292ffb2" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.044010 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="object-replicator" containerID="cri-o://a10effcebd5f35f3664ee774bfb75160c87673f9f08ab86e1ccd324075153ba4" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.064458 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-1"]
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.064957 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="account-server" containerID="cri-o://23f16ba6e5ba6a363d0ee9ae965d1b10aa7a42784a6480cb45492d47033a7ba8" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.065385 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="swift-recon-cron" containerID="cri-o://527c335c96bbde9f354d78b445883d59eef8e57fa945fe3616c7ffa9bbd6770c" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.065435 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="rsync" containerID="cri-o://b415c6a54f47f05402c3feef392d112113942b22d937deaa338e1bd319c82b54" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.065468 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="object-expirer" containerID="cri-o://eab743ecc90895a84bc0ce0d7cc79df23a1ceb78f36452966772b7ab9eab3467" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.065499 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="object-updater" containerID="cri-o://f9a98aa39ad63d3e3cd260454a7a9de04082531369d39733e70305ac416cea9e" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.065536 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="object-auditor" containerID="cri-o://97a7f4949dee21f7325aece319ca46ed43183cb5ea18e9253c3f974b31284e87" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.065565 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="object-replicator" containerID="cri-o://f646834e6851b96fb382e1ef2015754dc2c7b2787e5b226395b64e3b11d176a1" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.065596 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="object-server" containerID="cri-o://5c9caba665ad8644e75784ffcbd69d930eacb480b36e91cfd365eb7c9c96ae6f" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.065628 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="container-updater" containerID="cri-o://bff9c806ed7cdb75c1f4838b9a094c93dc75ade4c30c0b29d24a1cf2577a3e99" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.065665 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="container-auditor" containerID="cri-o://4afe6b66ed2fd13ad641a9847b636ca80bb77c44eeae3e5205cb9852b91e1c24" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.065698 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="container-replicator" containerID="cri-o://3d339f409ffcb5325ad919da7c5d4cbda84d02d5b1e4439f03e3c1143cf7fe72" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.065729 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="container-server" containerID="cri-o://c91ab6152d5ea9d28ccd73b6974a0f4d3883709c99852ba25bec0db8c9a6e5ac" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.065760 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="account-reaper" containerID="cri-o://e34cb9c5f822beead5263fd88e158f66d3b44e29ea1a45b39948e974da9d3afa" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.065791 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="account-auditor" containerID="cri-o://1d28eb610f8b253b7a6064cd6c60e01513782dca50d398abb69cf5666587bc32" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.065819 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="account-replicator" containerID="cri-o://b07a59c52b0cbdde0c15d799dc68288f4ee14a18c067573b81f96b3508b94432" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.070148 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d303cb3-63f2-4130-a35f-000b31b5d414-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "2d303cb3-63f2-4130-a35f-000b31b5d414" (UID: "2d303cb3-63f2-4130-a35f-000b31b5d414"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.071372 5003 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2d303cb3-63f2-4130-a35f-000b31b5d414-ring-data-devices\") on node \"crc\" DevicePath \"\""
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.071402 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5d6z\" (UniqueName: \"kubernetes.io/projected/2d303cb3-63f2-4130-a35f-000b31b5d414-kube-api-access-c5d6z\") on node \"crc\" DevicePath \"\""
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.071414 5003 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2d303cb3-63f2-4130-a35f-000b31b5d414-swiftconf\") on node \"crc\" DevicePath \"\""
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.071422 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2d303cb3-63f2-4130-a35f-000b31b5d414-etc-swift\") on node \"crc\" DevicePath \"\""
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.071430 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2d303cb3-63f2-4130-a35f-000b31b5d414-scripts\") on node \"crc\" DevicePath \"\""
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.089767 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-bzq42"]
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.099703 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-bzq42"]
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.101438 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d303cb3-63f2-4130-a35f-000b31b5d414-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "2d303cb3-63f2-4130-a35f-000b31b5d414" (UID: "2d303cb3-63f2-4130-a35f-000b31b5d414"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.107301 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5"]
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.107581 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" podUID="168289f3-76bd-4518-8672-b02c64df8a27" containerName="proxy-httpd" containerID="cri-o://b9a2c76b4ef7bf090acc9d77429d83605d958090a843ebfb02e4799f93758cf2" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.108125 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" podUID="168289f3-76bd-4518-8672-b02c64df8a27" containerName="proxy-server" containerID="cri-o://4369e1000c54f50e9b8dac24d89ef63ebe424904ee457f369e66ef336eb4fc5d" gracePeriod=30
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.176731 5003 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2d303cb3-63f2-4130-a35f-000b31b5d414-dispersionconf\") on node \"crc\" DevicePath \"\""
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.564699 5003 generic.go:334] "Generic (PLEG): container finished" podID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerID="18e58aa23f76d39065cf7ec27c37a7736d755ff1746ffe71aa5eb4d936e040d8" exitCode=0
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.564738 5003 generic.go:334] "Generic (PLEG): container finished" podID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerID="07bd1aaf9672dd7b6a3a855467320342869a84bbfd503485f147fb074f7b62da" exitCode=0
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.564747 5003 generic.go:334] "Generic (PLEG): container finished" podID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerID="bed31d1f568e801654716e146fb9ddb44ea521879275a28f8dde39238b54ba0a" exitCode=0
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.564754 5003 generic.go:334] "Generic (PLEG): container finished" podID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerID="c44a317d8182394990b6b241828022a2a0a8d9496dc2292f96a1cc72b55ade51" exitCode=0
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.564765 5003 generic.go:334] "Generic (PLEG): container finished" podID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerID="178bd810c5348d96a9efd14bc38890c354eeb36dd59dc06f183129fe85ea1d5f" exitCode=0
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.564774 5003 generic.go:334] "Generic (PLEG): container finished" podID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerID="4c70dc4068dca5aa538646b5a5cf67b9678e3c5da2f2b07c409ac6f6c846c7e7" exitCode=0
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.564782 5003 generic.go:334] "Generic (PLEG): container finished" podID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerID="dcf0b5ce28344896d5ad2a9cb5419af7078cf89fa9ce1120a5322d2a61e9b335" exitCode=0
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.564789 5003 generic.go:334] "Generic (PLEG): container finished" podID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerID="e240db621933c2cd325c34006ef2c68ab817226ffc8733b8fcedc9e3f09f14fd" exitCode=0
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.564796 5003 generic.go:334] "Generic (PLEG): container finished" podID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerID="2fc3c87be59a5ec7ba8a74b787b40113f9389d112d941fb7c2ed672c43b2edfd" exitCode=0
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.564803 5003 generic.go:334] "Generic (PLEG): container finished" podID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerID="8223fc509969f6776b993e8185f9b069d5c9f086609a260402a84d4770684a11" exitCode=0
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.564811 5003 generic.go:334] "Generic (PLEG): container finished" podID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerID="4ad9c01357ab348dfff940847ee95e8e56b30bd4d45d9f4e4af91de556a91141" exitCode=0
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.564791 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd","Type":"ContainerDied","Data":"18e58aa23f76d39065cf7ec27c37a7736d755ff1746ffe71aa5eb4d936e040d8"}
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.564868 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd","Type":"ContainerDied","Data":"07bd1aaf9672dd7b6a3a855467320342869a84bbfd503485f147fb074f7b62da"}
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.564885 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd","Type":"ContainerDied","Data":"bed31d1f568e801654716e146fb9ddb44ea521879275a28f8dde39238b54ba0a"}
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.564897 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd","Type":"ContainerDied","Data":"c44a317d8182394990b6b241828022a2a0a8d9496dc2292f96a1cc72b55ade51"}
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.564818 5003 generic.go:334] "Generic (PLEG): container finished" podID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerID="cd3ec3973c0fdea5b314fe57cccfee96424716c43decd2b932dcb5a277f50475" exitCode=0
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.564914 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd","Type":"ContainerDied","Data":"178bd810c5348d96a9efd14bc38890c354eeb36dd59dc06f183129fe85ea1d5f"}
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.564927 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd","Type":"ContainerDied","Data":"4c70dc4068dca5aa538646b5a5cf67b9678e3c5da2f2b07c409ac6f6c846c7e7"}
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.564941 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd","Type":"ContainerDied","Data":"dcf0b5ce28344896d5ad2a9cb5419af7078cf89fa9ce1120a5322d2a61e9b335"}
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.564953 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd","Type":"ContainerDied","Data":"e240db621933c2cd325c34006ef2c68ab817226ffc8733b8fcedc9e3f09f14fd"}
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.564964 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd","Type":"ContainerDied","Data":"2fc3c87be59a5ec7ba8a74b787b40113f9389d112d941fb7c2ed672c43b2edfd"}
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.564979 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd","Type":"ContainerDied","Data":"8223fc509969f6776b993e8185f9b069d5c9f086609a260402a84d4770684a11"}
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.564991 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd","Type":"ContainerDied","Data":"4ad9c01357ab348dfff940847ee95e8e56b30bd4d45d9f4e4af91de556a91141"}
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.565004 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd","Type":"ContainerDied","Data":"cd3ec3973c0fdea5b314fe57cccfee96424716c43decd2b932dcb5a277f50475"}
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.570185 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="64e320a71de59d892c7593d0ef6f674d6c9a340d90d9f54a030ff63e9389505a"
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.570239 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-jl4gl"
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.596007 5003 generic.go:334] "Generic (PLEG): container finished" podID="85e899d6-b28a-4b65-bbed-90648be93627" containerID="eab743ecc90895a84bc0ce0d7cc79df23a1ceb78f36452966772b7ab9eab3467" exitCode=0
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.596039 5003 generic.go:334] "Generic (PLEG): container finished" podID="85e899d6-b28a-4b65-bbed-90648be93627" containerID="f9a98aa39ad63d3e3cd260454a7a9de04082531369d39733e70305ac416cea9e" exitCode=0
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.596047 5003 generic.go:334] "Generic (PLEG): container finished" podID="85e899d6-b28a-4b65-bbed-90648be93627" containerID="97a7f4949dee21f7325aece319ca46ed43183cb5ea18e9253c3f974b31284e87" exitCode=0
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.596053 5003 generic.go:334] "Generic (PLEG): container finished" podID="85e899d6-b28a-4b65-bbed-90648be93627" containerID="f646834e6851b96fb382e1ef2015754dc2c7b2787e5b226395b64e3b11d176a1" exitCode=0
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.596063 5003 generic.go:334] "Generic (PLEG): container finished" podID="85e899d6-b28a-4b65-bbed-90648be93627" containerID="bff9c806ed7cdb75c1f4838b9a094c93dc75ade4c30c0b29d24a1cf2577a3e99" exitCode=0
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.596069 5003 generic.go:334] "Generic (PLEG): container finished" podID="85e899d6-b28a-4b65-bbed-90648be93627" containerID="4afe6b66ed2fd13ad641a9847b636ca80bb77c44eeae3e5205cb9852b91e1c24" exitCode=0
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.596077 5003 generic.go:334] "Generic (PLEG): container finished" podID="85e899d6-b28a-4b65-bbed-90648be93627" containerID="3d339f409ffcb5325ad919da7c5d4cbda84d02d5b1e4439f03e3c1143cf7fe72" exitCode=0
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.596084 5003 generic.go:334] "Generic (PLEG): container finished" podID="85e899d6-b28a-4b65-bbed-90648be93627" containerID="c91ab6152d5ea9d28ccd73b6974a0f4d3883709c99852ba25bec0db8c9a6e5ac" exitCode=0
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.596090 5003 generic.go:334] "Generic (PLEG): container finished" podID="85e899d6-b28a-4b65-bbed-90648be93627" containerID="e34cb9c5f822beead5263fd88e158f66d3b44e29ea1a45b39948e974da9d3afa" exitCode=0
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.596099 5003 generic.go:334] "Generic (PLEG): container finished" podID="85e899d6-b28a-4b65-bbed-90648be93627" containerID="1d28eb610f8b253b7a6064cd6c60e01513782dca50d398abb69cf5666587bc32" exitCode=0
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.596106 5003 generic.go:334] "Generic (PLEG): container finished" podID="85e899d6-b28a-4b65-bbed-90648be93627" containerID="b07a59c52b0cbdde0c15d799dc68288f4ee14a18c067573b81f96b3508b94432" exitCode=0
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.596187 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"85e899d6-b28a-4b65-bbed-90648be93627","Type":"ContainerDied","Data":"eab743ecc90895a84bc0ce0d7cc79df23a1ceb78f36452966772b7ab9eab3467"}
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.596260 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"85e899d6-b28a-4b65-bbed-90648be93627","Type":"ContainerDied","Data":"f9a98aa39ad63d3e3cd260454a7a9de04082531369d39733e70305ac416cea9e"}
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.596311 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"85e899d6-b28a-4b65-bbed-90648be93627","Type":"ContainerDied","Data":"97a7f4949dee21f7325aece319ca46ed43183cb5ea18e9253c3f974b31284e87"}
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.596324 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"85e899d6-b28a-4b65-bbed-90648be93627","Type":"ContainerDied","Data":"f646834e6851b96fb382e1ef2015754dc2c7b2787e5b226395b64e3b11d176a1"}
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.596335 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"85e899d6-b28a-4b65-bbed-90648be93627","Type":"ContainerDied","Data":"bff9c806ed7cdb75c1f4838b9a094c93dc75ade4c30c0b29d24a1cf2577a3e99"}
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.596346 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"85e899d6-b28a-4b65-bbed-90648be93627","Type":"ContainerDied","Data":"4afe6b66ed2fd13ad641a9847b636ca80bb77c44eeae3e5205cb9852b91e1c24"}
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.596357 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"85e899d6-b28a-4b65-bbed-90648be93627","Type":"ContainerDied","Data":"3d339f409ffcb5325ad919da7c5d4cbda84d02d5b1e4439f03e3c1143cf7fe72"}
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.596368 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"85e899d6-b28a-4b65-bbed-90648be93627","Type":"ContainerDied","Data":"c91ab6152d5ea9d28ccd73b6974a0f4d3883709c99852ba25bec0db8c9a6e5ac"}
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.596379 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"85e899d6-b28a-4b65-bbed-90648be93627","Type":"ContainerDied","Data":"e34cb9c5f822beead5263fd88e158f66d3b44e29ea1a45b39948e974da9d3afa"}
Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.596391 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1"
event={"ID":"85e899d6-b28a-4b65-bbed-90648be93627","Type":"ContainerDied","Data":"1d28eb610f8b253b7a6064cd6c60e01513782dca50d398abb69cf5666587bc32"} Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.596401 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"85e899d6-b28a-4b65-bbed-90648be93627","Type":"ContainerDied","Data":"b07a59c52b0cbdde0c15d799dc68288f4ee14a18c067573b81f96b3508b94432"} Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.603178 5003 generic.go:334] "Generic (PLEG): container finished" podID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerID="f12fcca496d3abd1c909b9cf17dd411cfdd166e83214f3c1c87091761b52a212" exitCode=0 Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.603223 5003 generic.go:334] "Generic (PLEG): container finished" podID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerID="162e1b56b56be0a20546eddac9ef658defb1efdb1e957f4ec5c5844d879ac13b" exitCode=0 Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.603240 5003 generic.go:334] "Generic (PLEG): container finished" podID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerID="f8814d625f7117c51d0aec533953e4fa6b9b8b683beab027deb63801d292ffb2" exitCode=0 Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.603252 5003 generic.go:334] "Generic (PLEG): container finished" podID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerID="a10effcebd5f35f3664ee774bfb75160c87673f9f08ab86e1ccd324075153ba4" exitCode=0 Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.603263 5003 generic.go:334] "Generic (PLEG): container finished" podID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerID="73e8ebe063e7176c37d9fc88e20295c086ec115cb05b91434693bd33f1f49da2" exitCode=0 Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.603256 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f66de5f0-1dc9-497e-828e-563484b9f60e","Type":"ContainerDied","Data":"f12fcca496d3abd1c909b9cf17dd411cfdd166e83214f3c1c87091761b52a212"} Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.603341 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f66de5f0-1dc9-497e-828e-563484b9f60e","Type":"ContainerDied","Data":"162e1b56b56be0a20546eddac9ef658defb1efdb1e957f4ec5c5844d879ac13b"} Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.603355 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f66de5f0-1dc9-497e-828e-563484b9f60e","Type":"ContainerDied","Data":"f8814d625f7117c51d0aec533953e4fa6b9b8b683beab027deb63801d292ffb2"} Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.603365 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f66de5f0-1dc9-497e-828e-563484b9f60e","Type":"ContainerDied","Data":"a10effcebd5f35f3664ee774bfb75160c87673f9f08ab86e1ccd324075153ba4"} Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.603375 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f66de5f0-1dc9-497e-828e-563484b9f60e","Type":"ContainerDied","Data":"73e8ebe063e7176c37d9fc88e20295c086ec115cb05b91434693bd33f1f49da2"} Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.603385 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" 
event={"ID":"f66de5f0-1dc9-497e-828e-563484b9f60e","Type":"ContainerDied","Data":"8640f61b9595e8cc65355db36b00c634c8f37f6bedb38c5576effa879d69cf8f"} Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.603273 5003 generic.go:334] "Generic (PLEG): container finished" podID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerID="8640f61b9595e8cc65355db36b00c634c8f37f6bedb38c5576effa879d69cf8f" exitCode=0 Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.603434 5003 generic.go:334] "Generic (PLEG): container finished" podID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerID="8d6c329a9bc40fbf978e8c86849f93750f8ea4257e5419fc4efd17dfa1c42ba4" exitCode=0 Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.603490 5003 generic.go:334] "Generic (PLEG): container finished" podID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerID="76c91568fed3d3be7b0d221f08c5227712a61135c9639cbdc2b07a9d4d24752e" exitCode=0 Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.603500 5003 generic.go:334] "Generic (PLEG): container finished" podID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerID="4384daada70a6c99d7a2ffb160ce6ab354e3942cd7ba8daebd42308333183a53" exitCode=0 Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.603507 5003 generic.go:334] "Generic (PLEG): container finished" podID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerID="5e93c0b13111352b01ab24a7699b92113b6bf3ebf4cf55453135ded7ea36f320" exitCode=0 Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.603521 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f66de5f0-1dc9-497e-828e-563484b9f60e","Type":"ContainerDied","Data":"8d6c329a9bc40fbf978e8c86849f93750f8ea4257e5419fc4efd17dfa1c42ba4"} Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.603548 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f66de5f0-1dc9-497e-828e-563484b9f60e","Type":"ContainerDied","Data":"76c91568fed3d3be7b0d221f08c5227712a61135c9639cbdc2b07a9d4d24752e"} Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.603558 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f66de5f0-1dc9-497e-828e-563484b9f60e","Type":"ContainerDied","Data":"4384daada70a6c99d7a2ffb160ce6ab354e3942cd7ba8daebd42308333183a53"} Jan 26 11:04:50 crc kubenswrapper[5003]: I0126 11:04:50.603566 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f66de5f0-1dc9-497e-828e-563484b9f60e","Type":"ContainerDied","Data":"5e93c0b13111352b01ab24a7699b92113b6bf3ebf4cf55453135ded7ea36f320"} Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.055057 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d303cb3-63f2-4130-a35f-000b31b5d414" path="/var/lib/kubelet/pods/2d303cb3-63f2-4130-a35f-000b31b5d414/volumes" Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.056239 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="867792a2-0351-43c7-88c9-e28a5305d7de" path="/var/lib/kubelet/pods/867792a2-0351-43c7-88c9-e28a5305d7de/volumes" Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.304421 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.495159 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/168289f3-76bd-4518-8672-b02c64df8a27-etc-swift\") pod \"168289f3-76bd-4518-8672-b02c64df8a27\" (UID: \"168289f3-76bd-4518-8672-b02c64df8a27\") " Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.495307 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/168289f3-76bd-4518-8672-b02c64df8a27-run-httpd\") pod \"168289f3-76bd-4518-8672-b02c64df8a27\" (UID: \"168289f3-76bd-4518-8672-b02c64df8a27\") " Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.495365 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/168289f3-76bd-4518-8672-b02c64df8a27-config-data\") pod \"168289f3-76bd-4518-8672-b02c64df8a27\" (UID: \"168289f3-76bd-4518-8672-b02c64df8a27\") " Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.496014 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/168289f3-76bd-4518-8672-b02c64df8a27-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "168289f3-76bd-4518-8672-b02c64df8a27" (UID: "168289f3-76bd-4518-8672-b02c64df8a27"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.496209 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/168289f3-76bd-4518-8672-b02c64df8a27-log-httpd\") pod \"168289f3-76bd-4518-8672-b02c64df8a27\" (UID: \"168289f3-76bd-4518-8672-b02c64df8a27\") " Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.496672 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/168289f3-76bd-4518-8672-b02c64df8a27-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "168289f3-76bd-4518-8672-b02c64df8a27" (UID: "168289f3-76bd-4518-8672-b02c64df8a27"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.496246 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hxt2x\" (UniqueName: \"kubernetes.io/projected/168289f3-76bd-4518-8672-b02c64df8a27-kube-api-access-hxt2x\") pod \"168289f3-76bd-4518-8672-b02c64df8a27\" (UID: \"168289f3-76bd-4518-8672-b02c64df8a27\") " Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.497217 5003 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/168289f3-76bd-4518-8672-b02c64df8a27-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.497234 5003 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/168289f3-76bd-4518-8672-b02c64df8a27-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.501994 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/168289f3-76bd-4518-8672-b02c64df8a27-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "168289f3-76bd-4518-8672-b02c64df8a27" (UID: "168289f3-76bd-4518-8672-b02c64df8a27"). 
InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.502308 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/168289f3-76bd-4518-8672-b02c64df8a27-kube-api-access-hxt2x" (OuterVolumeSpecName: "kube-api-access-hxt2x") pod "168289f3-76bd-4518-8672-b02c64df8a27" (UID: "168289f3-76bd-4518-8672-b02c64df8a27"). InnerVolumeSpecName "kube-api-access-hxt2x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.533997 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/168289f3-76bd-4518-8672-b02c64df8a27-config-data" (OuterVolumeSpecName: "config-data") pod "168289f3-76bd-4518-8672-b02c64df8a27" (UID: "168289f3-76bd-4518-8672-b02c64df8a27"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.598914 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hxt2x\" (UniqueName: \"kubernetes.io/projected/168289f3-76bd-4518-8672-b02c64df8a27-kube-api-access-hxt2x\") on node \"crc\" DevicePath \"\"" Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.598952 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/168289f3-76bd-4518-8672-b02c64df8a27-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.598962 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/168289f3-76bd-4518-8672-b02c64df8a27-config-data\") on node \"crc\" DevicePath \"\"" Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.622301 5003 generic.go:334] "Generic (PLEG): container finished" podID="168289f3-76bd-4518-8672-b02c64df8a27" containerID="4369e1000c54f50e9b8dac24d89ef63ebe424904ee457f369e66ef336eb4fc5d" exitCode=0 Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.622340 5003 generic.go:334] "Generic (PLEG): container finished" podID="168289f3-76bd-4518-8672-b02c64df8a27" containerID="b9a2c76b4ef7bf090acc9d77429d83605d958090a843ebfb02e4799f93758cf2" exitCode=0 Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.622401 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" event={"ID":"168289f3-76bd-4518-8672-b02c64df8a27","Type":"ContainerDied","Data":"4369e1000c54f50e9b8dac24d89ef63ebe424904ee457f369e66ef336eb4fc5d"} Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.622471 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" event={"ID":"168289f3-76bd-4518-8672-b02c64df8a27","Type":"ContainerDied","Data":"b9a2c76b4ef7bf090acc9d77429d83605d958090a843ebfb02e4799f93758cf2"} Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.622483 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" event={"ID":"168289f3-76bd-4518-8672-b02c64df8a27","Type":"ContainerDied","Data":"edfae511caa0893bfd2366c7c38bf5f14741e8e5868e0947edb4d2b707b6ec1f"} Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.622407 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5" Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.628050 5003 scope.go:117] "RemoveContainer" containerID="4369e1000c54f50e9b8dac24d89ef63ebe424904ee457f369e66ef336eb4fc5d" Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.631540 5003 generic.go:334] "Generic (PLEG): container finished" podID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerID="aa3f16901f9fac2eeb4f0e98481575cc714ec4e56ed459a337209d6cae3ac9bd" exitCode=0 Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.631561 5003 generic.go:334] "Generic (PLEG): container finished" podID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerID="cdbd7ccfbd2dda5c8e914cfc64d48a202c645b60787abb3380799caf918f44aa" exitCode=0 Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.631603 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd","Type":"ContainerDied","Data":"aa3f16901f9fac2eeb4f0e98481575cc714ec4e56ed459a337209d6cae3ac9bd"} Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.631631 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd","Type":"ContainerDied","Data":"cdbd7ccfbd2dda5c8e914cfc64d48a202c645b60787abb3380799caf918f44aa"} Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.642475 5003 generic.go:334] "Generic (PLEG): container finished" podID="85e899d6-b28a-4b65-bbed-90648be93627" containerID="b415c6a54f47f05402c3feef392d112113942b22d937deaa338e1bd319c82b54" exitCode=0 Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.642510 5003 generic.go:334] "Generic (PLEG): container finished" podID="85e899d6-b28a-4b65-bbed-90648be93627" containerID="5c9caba665ad8644e75784ffcbd69d930eacb480b36e91cfd365eb7c9c96ae6f" exitCode=0 Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.642519 5003 generic.go:334] "Generic (PLEG): container finished" podID="85e899d6-b28a-4b65-bbed-90648be93627" containerID="23f16ba6e5ba6a363d0ee9ae965d1b10aa7a42784a6480cb45492d47033a7ba8" exitCode=0 Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.642564 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"85e899d6-b28a-4b65-bbed-90648be93627","Type":"ContainerDied","Data":"b415c6a54f47f05402c3feef392d112113942b22d937deaa338e1bd319c82b54"} Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.642594 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"85e899d6-b28a-4b65-bbed-90648be93627","Type":"ContainerDied","Data":"5c9caba665ad8644e75784ffcbd69d930eacb480b36e91cfd365eb7c9c96ae6f"} Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.642605 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"85e899d6-b28a-4b65-bbed-90648be93627","Type":"ContainerDied","Data":"23f16ba6e5ba6a363d0ee9ae965d1b10aa7a42784a6480cb45492d47033a7ba8"} Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.656011 5003 generic.go:334] "Generic (PLEG): container finished" podID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerID="7b458a16e007ce6f0cee91e56dbf993488bc1c68a09fe8963fde8a000508741b" exitCode=0 Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.656051 5003 generic.go:334] "Generic (PLEG): container finished" podID="f66de5f0-1dc9-497e-828e-563484b9f60e" 
containerID="ce19ee917cb68ccf5d427cd056c695990454baa0d29ce04a1105dbd984274d51" exitCode=0 Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.656061 5003 generic.go:334] "Generic (PLEG): container finished" podID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerID="9a891062f08c6042aebdb2b82937f07a228467b3ed7709963fb06d4fa378eef3" exitCode=0 Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.656070 5003 generic.go:334] "Generic (PLEG): container finished" podID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerID="fa643b20c3314cb09c04e05aa736116433ac48b341a87f5e0541497454d90ae1" exitCode=0 Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.656068 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f66de5f0-1dc9-497e-828e-563484b9f60e","Type":"ContainerDied","Data":"7b458a16e007ce6f0cee91e56dbf993488bc1c68a09fe8963fde8a000508741b"} Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.656117 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f66de5f0-1dc9-497e-828e-563484b9f60e","Type":"ContainerDied","Data":"ce19ee917cb68ccf5d427cd056c695990454baa0d29ce04a1105dbd984274d51"} Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.656131 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f66de5f0-1dc9-497e-828e-563484b9f60e","Type":"ContainerDied","Data":"9a891062f08c6042aebdb2b82937f07a228467b3ed7709963fb06d4fa378eef3"} Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.656145 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f66de5f0-1dc9-497e-828e-563484b9f60e","Type":"ContainerDied","Data":"fa643b20c3314cb09c04e05aa736116433ac48b341a87f5e0541497454d90ae1"} Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.660957 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5"] Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.664103 5003 scope.go:117] "RemoveContainer" containerID="b9a2c76b4ef7bf090acc9d77429d83605d958090a843ebfb02e4799f93758cf2" Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.668136 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-proxy-67f6cc5479-w87g5"] Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.681842 5003 scope.go:117] "RemoveContainer" containerID="4369e1000c54f50e9b8dac24d89ef63ebe424904ee457f369e66ef336eb4fc5d" Jan 26 11:04:51 crc kubenswrapper[5003]: E0126 11:04:51.682323 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4369e1000c54f50e9b8dac24d89ef63ebe424904ee457f369e66ef336eb4fc5d\": container with ID starting with 4369e1000c54f50e9b8dac24d89ef63ebe424904ee457f369e66ef336eb4fc5d not found: ID does not exist" containerID="4369e1000c54f50e9b8dac24d89ef63ebe424904ee457f369e66ef336eb4fc5d" Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.682362 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4369e1000c54f50e9b8dac24d89ef63ebe424904ee457f369e66ef336eb4fc5d"} err="failed to get container status \"4369e1000c54f50e9b8dac24d89ef63ebe424904ee457f369e66ef336eb4fc5d\": rpc error: code = NotFound desc = could not find container \"4369e1000c54f50e9b8dac24d89ef63ebe424904ee457f369e66ef336eb4fc5d\": container with ID starting with 
4369e1000c54f50e9b8dac24d89ef63ebe424904ee457f369e66ef336eb4fc5d not found: ID does not exist" Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.682385 5003 scope.go:117] "RemoveContainer" containerID="b9a2c76b4ef7bf090acc9d77429d83605d958090a843ebfb02e4799f93758cf2" Jan 26 11:04:51 crc kubenswrapper[5003]: E0126 11:04:51.682618 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9a2c76b4ef7bf090acc9d77429d83605d958090a843ebfb02e4799f93758cf2\": container with ID starting with b9a2c76b4ef7bf090acc9d77429d83605d958090a843ebfb02e4799f93758cf2 not found: ID does not exist" containerID="b9a2c76b4ef7bf090acc9d77429d83605d958090a843ebfb02e4799f93758cf2" Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.682646 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9a2c76b4ef7bf090acc9d77429d83605d958090a843ebfb02e4799f93758cf2"} err="failed to get container status \"b9a2c76b4ef7bf090acc9d77429d83605d958090a843ebfb02e4799f93758cf2\": rpc error: code = NotFound desc = could not find container \"b9a2c76b4ef7bf090acc9d77429d83605d958090a843ebfb02e4799f93758cf2\": container with ID starting with b9a2c76b4ef7bf090acc9d77429d83605d958090a843ebfb02e4799f93758cf2 not found: ID does not exist" Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.682660 5003 scope.go:117] "RemoveContainer" containerID="4369e1000c54f50e9b8dac24d89ef63ebe424904ee457f369e66ef336eb4fc5d" Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.682922 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4369e1000c54f50e9b8dac24d89ef63ebe424904ee457f369e66ef336eb4fc5d"} err="failed to get container status \"4369e1000c54f50e9b8dac24d89ef63ebe424904ee457f369e66ef336eb4fc5d\": rpc error: code = NotFound desc = could not find container \"4369e1000c54f50e9b8dac24d89ef63ebe424904ee457f369e66ef336eb4fc5d\": container with ID starting with 4369e1000c54f50e9b8dac24d89ef63ebe424904ee457f369e66ef336eb4fc5d not found: ID does not exist" Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.682968 5003 scope.go:117] "RemoveContainer" containerID="b9a2c76b4ef7bf090acc9d77429d83605d958090a843ebfb02e4799f93758cf2" Jan 26 11:04:51 crc kubenswrapper[5003]: I0126 11:04:51.683335 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9a2c76b4ef7bf090acc9d77429d83605d958090a843ebfb02e4799f93758cf2"} err="failed to get container status \"b9a2c76b4ef7bf090acc9d77429d83605d958090a843ebfb02e4799f93758cf2\": rpc error: code = NotFound desc = could not find container \"b9a2c76b4ef7bf090acc9d77429d83605d958090a843ebfb02e4799f93758cf2\": container with ID starting with b9a2c76b4ef7bf090acc9d77429d83605d958090a843ebfb02e4799f93758cf2 not found: ID does not exist" Jan 26 11:04:53 crc kubenswrapper[5003]: I0126 11:04:53.012031 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="168289f3-76bd-4518-8672-b02c64df8a27" path="/var/lib/kubelet/pods/168289f3-76bd-4518-8672-b02c64df8a27/volumes" Jan 26 11:05:09 crc kubenswrapper[5003]: I0126 11:05:09.040203 5003 patch_prober.go:28] interesting pod/machine-config-daemon-m84kp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 11:05:09 crc kubenswrapper[5003]: I0126 11:05:09.041324 5003 
prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 11:05:20 crc kubenswrapper[5003]: E0126 11:05:20.334760 5003 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85e899d6_b28a_4b65_bbed_90648be93627.slice/crio-conmon-527c335c96bbde9f354d78b445883d59eef8e57fa945fe3616c7ffa9bbd6770c.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod510b1209_97a2_4a4c_bc62_60d5c9ce6bcd.slice/crio-b1a0e343cad805ae8c6408da7e1ec9232470d5e94d9a936e6615f7c3187c00c6.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf66de5f0_1dc9_497e_828e_563484b9f60e.slice/crio-conmon-6875290abeab157e08486b92663f5ae51d75ce9c4d1edff0a97352af62aa9258.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod510b1209_97a2_4a4c_bc62_60d5c9ce6bcd.slice/crio-conmon-b1a0e343cad805ae8c6408da7e1ec9232470d5e94d9a936e6615f7c3187c00c6.scope\": RecentStats: unable to find data in memory cache]" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.528676 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.529859 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-2" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.534248 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-storage-1" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.690965 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/85e899d6-b28a-4b65-bbed-90648be93627-lock\") pod \"85e899d6-b28a-4b65-bbed-90648be93627\" (UID: \"85e899d6-b28a-4b65-bbed-90648be93627\") " Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.691026 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-cache\") pod \"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd\" (UID: \"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd\") " Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.691064 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jwjgs\" (UniqueName: \"kubernetes.io/projected/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-kube-api-access-jwjgs\") pod \"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd\" (UID: \"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd\") " Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.691101 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/f66de5f0-1dc9-497e-828e-563484b9f60e-cache\") pod \"f66de5f0-1dc9-497e-828e-563484b9f60e\" (UID: \"f66de5f0-1dc9-497e-828e-563484b9f60e\") " Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.691130 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/85e899d6-b28a-4b65-bbed-90648be93627-cache\") pod \"85e899d6-b28a-4b65-bbed-90648be93627\" (UID: \"85e899d6-b28a-4b65-bbed-90648be93627\") " Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.691151 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-etc-swift\") pod \"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd\" (UID: \"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd\") " Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.691169 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"85e899d6-b28a-4b65-bbed-90648be93627\" (UID: \"85e899d6-b28a-4b65-bbed-90648be93627\") " Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.691216 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/85e899d6-b28a-4b65-bbed-90648be93627-etc-swift\") pod \"85e899d6-b28a-4b65-bbed-90648be93627\" (UID: \"85e899d6-b28a-4b65-bbed-90648be93627\") " Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.691294 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"f66de5f0-1dc9-497e-828e-563484b9f60e\" (UID: \"f66de5f0-1dc9-497e-828e-563484b9f60e\") " Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.691312 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d2z52\" (UniqueName: \"kubernetes.io/projected/f66de5f0-1dc9-497e-828e-563484b9f60e-kube-api-access-d2z52\") pod \"f66de5f0-1dc9-497e-828e-563484b9f60e\" (UID: \"f66de5f0-1dc9-497e-828e-563484b9f60e\") " Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.691337 5003 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xms9z\" (UniqueName: \"kubernetes.io/projected/85e899d6-b28a-4b65-bbed-90648be93627-kube-api-access-xms9z\") pod \"85e899d6-b28a-4b65-bbed-90648be93627\" (UID: \"85e899d6-b28a-4b65-bbed-90648be93627\") " Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.691365 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd\" (UID: \"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd\") " Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.691407 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-lock\") pod \"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd\" (UID: \"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd\") " Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.691429 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/f66de5f0-1dc9-497e-828e-563484b9f60e-lock\") pod \"f66de5f0-1dc9-497e-828e-563484b9f60e\" (UID: \"f66de5f0-1dc9-497e-828e-563484b9f60e\") " Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.691449 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f66de5f0-1dc9-497e-828e-563484b9f60e-etc-swift\") pod \"f66de5f0-1dc9-497e-828e-563484b9f60e\" (UID: \"f66de5f0-1dc9-497e-828e-563484b9f60e\") " Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.691601 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85e899d6-b28a-4b65-bbed-90648be93627-lock" (OuterVolumeSpecName: "lock") pod "85e899d6-b28a-4b65-bbed-90648be93627" (UID: "85e899d6-b28a-4b65-bbed-90648be93627"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.691850 5003 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/85e899d6-b28a-4b65-bbed-90648be93627-lock\") on node \"crc\" DevicePath \"\"" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.692174 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-lock" (OuterVolumeSpecName: "lock") pod "510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" (UID: "510b1209-97a2-4a4c-bc62-60d5c9ce6bcd"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.692453 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-cache" (OuterVolumeSpecName: "cache") pod "510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" (UID: "510b1209-97a2-4a4c-bc62-60d5c9ce6bcd"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.692453 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f66de5f0-1dc9-497e-828e-563484b9f60e-cache" (OuterVolumeSpecName: "cache") pod "f66de5f0-1dc9-497e-828e-563484b9f60e" (UID: "f66de5f0-1dc9-497e-828e-563484b9f60e"). InnerVolumeSpecName "cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.692505 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f66de5f0-1dc9-497e-828e-563484b9f60e-lock" (OuterVolumeSpecName: "lock") pod "f66de5f0-1dc9-497e-828e-563484b9f60e" (UID: "f66de5f0-1dc9-497e-828e-563484b9f60e"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.692943 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85e899d6-b28a-4b65-bbed-90648be93627-cache" (OuterVolumeSpecName: "cache") pod "85e899d6-b28a-4b65-bbed-90648be93627" (UID: "85e899d6-b28a-4b65-bbed-90648be93627"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.697011 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85e899d6-b28a-4b65-bbed-90648be93627-kube-api-access-xms9z" (OuterVolumeSpecName: "kube-api-access-xms9z") pod "85e899d6-b28a-4b65-bbed-90648be93627" (UID: "85e899d6-b28a-4b65-bbed-90648be93627"). InnerVolumeSpecName "kube-api-access-xms9z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.697291 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f66de5f0-1dc9-497e-828e-563484b9f60e-kube-api-access-d2z52" (OuterVolumeSpecName: "kube-api-access-d2z52") pod "f66de5f0-1dc9-497e-828e-563484b9f60e" (UID: "f66de5f0-1dc9-497e-828e-563484b9f60e"). InnerVolumeSpecName "kube-api-access-d2z52". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.697400 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-kube-api-access-jwjgs" (OuterVolumeSpecName: "kube-api-access-jwjgs") pod "510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" (UID: "510b1209-97a2-4a4c-bc62-60d5c9ce6bcd"). InnerVolumeSpecName "kube-api-access-jwjgs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.697427 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f66de5f0-1dc9-497e-828e-563484b9f60e-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "f66de5f0-1dc9-497e-828e-563484b9f60e" (UID: "f66de5f0-1dc9-497e-828e-563484b9f60e"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.697440 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" (UID: "510b1209-97a2-4a4c-bc62-60d5c9ce6bcd"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.697830 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "swift") pod "85e899d6-b28a-4b65-bbed-90648be93627" (UID: "85e899d6-b28a-4b65-bbed-90648be93627"). InnerVolumeSpecName "local-storage12-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.697868 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "swift") pod "f66de5f0-1dc9-497e-828e-563484b9f60e" (UID: "f66de5f0-1dc9-497e-828e-563484b9f60e"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.698680 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85e899d6-b28a-4b65-bbed-90648be93627-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "85e899d6-b28a-4b65-bbed-90648be93627" (UID: "85e899d6-b28a-4b65-bbed-90648be93627"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.698966 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "swift") pod "510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" (UID: "510b1209-97a2-4a4c-bc62-60d5c9ce6bcd"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.793182 5003 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-lock\") on node \"crc\" DevicePath \"\"" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.793501 5003 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/f66de5f0-1dc9-497e-828e-563484b9f60e-lock\") on node \"crc\" DevicePath \"\"" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.793583 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f66de5f0-1dc9-497e-828e-563484b9f60e-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.793683 5003 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-cache\") on node \"crc\" DevicePath \"\"" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.793737 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jwjgs\" (UniqueName: \"kubernetes.io/projected/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-kube-api-access-jwjgs\") on node \"crc\" DevicePath \"\"" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.793801 5003 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/f66de5f0-1dc9-497e-828e-563484b9f60e-cache\") on node \"crc\" DevicePath \"\"" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.793853 5003 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/85e899d6-b28a-4b65-bbed-90648be93627-cache\") on node \"crc\" DevicePath \"\"" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.793909 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.793991 5003 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.794054 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/85e899d6-b28a-4b65-bbed-90648be93627-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.794114 5003 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.794179 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d2z52\" (UniqueName: \"kubernetes.io/projected/f66de5f0-1dc9-497e-828e-563484b9f60e-kube-api-access-d2z52\") on node \"crc\" DevicePath \"\"" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.794251 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xms9z\" (UniqueName: \"kubernetes.io/projected/85e899d6-b28a-4b65-bbed-90648be93627-kube-api-access-xms9z\") on node \"crc\" DevicePath \"\"" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.794348 5003 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.806792 5003 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.807183 5003 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.813010 5003 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.895836 5003 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.895879 5003 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.895892 5003 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.937043 5003 generic.go:334] "Generic (PLEG): container finished" podID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerID="6875290abeab157e08486b92663f5ae51d75ce9c4d1edff0a97352af62aa9258" exitCode=137 Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.937231 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"f66de5f0-1dc9-497e-828e-563484b9f60e","Type":"ContainerDied","Data":"6875290abeab157e08486b92663f5ae51d75ce9c4d1edff0a97352af62aa9258"} Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.937582 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" 
event={"ID":"f66de5f0-1dc9-497e-828e-563484b9f60e","Type":"ContainerDied","Data":"52abf7d4ccdebca66eca1b67b807063a14f8054ac72e6274b7585334016a8a51"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.937372 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-2"
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.937673 5003 scope.go:117] "RemoveContainer" containerID="6875290abeab157e08486b92663f5ae51d75ce9c4d1edff0a97352af62aa9258"
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.948789 5003 generic.go:334] "Generic (PLEG): container finished" podID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerID="b1a0e343cad805ae8c6408da7e1ec9232470d5e94d9a936e6615f7c3187c00c6" exitCode=137
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.948869 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd","Type":"ContainerDied","Data":"b1a0e343cad805ae8c6408da7e1ec9232470d5e94d9a936e6615f7c3187c00c6"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.949165 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"510b1209-97a2-4a4c-bc62-60d5c9ce6bcd","Type":"ContainerDied","Data":"f8c4785296e0f598b91e15594e02e9fea625d0b829a5ea6f7723c7befc440b9a"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.949018 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0"
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.949181 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"aa3f16901f9fac2eeb4f0e98481575cc714ec4e56ed459a337209d6cae3ac9bd"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.949289 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4c70dc4068dca5aa538646b5a5cf67b9678e3c5da2f2b07c409ac6f6c846c7e7"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.949296 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dcf0b5ce28344896d5ad2a9cb5419af7078cf89fa9ce1120a5322d2a61e9b335"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.949301 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e240db621933c2cd325c34006ef2c68ab817226ffc8733b8fcedc9e3f09f14fd"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.949307 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2fc3c87be59a5ec7ba8a74b787b40113f9389d112d941fb7c2ed672c43b2edfd"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.949312 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8223fc509969f6776b993e8185f9b069d5c9f086609a260402a84d4770684a11"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.949317 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4ad9c01357ab348dfff940847ee95e8e56b30bd4d45d9f4e4af91de556a91141"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.949322 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cd3ec3973c0fdea5b314fe57cccfee96424716c43decd2b932dcb5a277f50475"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.949327 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cdbd7ccfbd2dda5c8e914cfc64d48a202c645b60787abb3380799caf918f44aa"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.958959 5003 generic.go:334] "Generic (PLEG): container finished" podID="85e899d6-b28a-4b65-bbed-90648be93627" containerID="527c335c96bbde9f354d78b445883d59eef8e57fa945fe3616c7ffa9bbd6770c" exitCode=137
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.959042 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"85e899d6-b28a-4b65-bbed-90648be93627","Type":"ContainerDied","Data":"527c335c96bbde9f354d78b445883d59eef8e57fa945fe3616c7ffa9bbd6770c"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.959465 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"527c335c96bbde9f354d78b445883d59eef8e57fa945fe3616c7ffa9bbd6770c"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.959523 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b415c6a54f47f05402c3feef392d112113942b22d937deaa338e1bd319c82b54"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.959588 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"eab743ecc90895a84bc0ce0d7cc79df23a1ceb78f36452966772b7ab9eab3467"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.959650 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f9a98aa39ad63d3e3cd260454a7a9de04082531369d39733e70305ac416cea9e"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.959700 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"97a7f4949dee21f7325aece319ca46ed43183cb5ea18e9253c3f974b31284e87"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.959747 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f646834e6851b96fb382e1ef2015754dc2c7b2787e5b226395b64e3b11d176a1"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.959798 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5c9caba665ad8644e75784ffcbd69d930eacb480b36e91cfd365eb7c9c96ae6f"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.959861 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bff9c806ed7cdb75c1f4838b9a094c93dc75ade4c30c0b29d24a1cf2577a3e99"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.959916 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4afe6b66ed2fd13ad641a9847b636ca80bb77c44eeae3e5205cb9852b91e1c24"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.959976 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3d339f409ffcb5325ad919da7c5d4cbda84d02d5b1e4439f03e3c1143cf7fe72"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.960036 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c91ab6152d5ea9d28ccd73b6974a0f4d3883709c99852ba25bec0db8c9a6e5ac"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.960093 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e34cb9c5f822beead5263fd88e158f66d3b44e29ea1a45b39948e974da9d3afa"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.960272 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1d28eb610f8b253b7a6064cd6c60e01513782dca50d398abb69cf5666587bc32"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.960366 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b07a59c52b0cbdde0c15d799dc68288f4ee14a18c067573b81f96b3508b94432"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.960427 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"23f16ba6e5ba6a363d0ee9ae965d1b10aa7a42784a6480cb45492d47033a7ba8"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.960499 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"85e899d6-b28a-4b65-bbed-90648be93627","Type":"ContainerDied","Data":"a669710f32b9961ee95e2e12ff38543dd5d2c3533329080d2fb73e90f0e98e33"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.960563 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"527c335c96bbde9f354d78b445883d59eef8e57fa945fe3616c7ffa9bbd6770c"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.960624 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b415c6a54f47f05402c3feef392d112113942b22d937deaa338e1bd319c82b54"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.960680 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"eab743ecc90895a84bc0ce0d7cc79df23a1ceb78f36452966772b7ab9eab3467"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.960735 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f9a98aa39ad63d3e3cd260454a7a9de04082531369d39733e70305ac416cea9e"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.960796 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"97a7f4949dee21f7325aece319ca46ed43183cb5ea18e9253c3f974b31284e87"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.960852 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f646834e6851b96fb382e1ef2015754dc2c7b2787e5b226395b64e3b11d176a1"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.960914 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5c9caba665ad8644e75784ffcbd69d930eacb480b36e91cfd365eb7c9c96ae6f"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.960971 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bff9c806ed7cdb75c1f4838b9a094c93dc75ade4c30c0b29d24a1cf2577a3e99"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.961027 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4afe6b66ed2fd13ad641a9847b636ca80bb77c44eeae3e5205cb9852b91e1c24"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.961090 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3d339f409ffcb5325ad919da7c5d4cbda84d02d5b1e4439f03e3c1143cf7fe72"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.961146 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c91ab6152d5ea9d28ccd73b6974a0f4d3883709c99852ba25bec0db8c9a6e5ac"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.961199 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e34cb9c5f822beead5263fd88e158f66d3b44e29ea1a45b39948e974da9d3afa"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.961248 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1d28eb610f8b253b7a6064cd6c60e01513782dca50d398abb69cf5666587bc32"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.961361 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b07a59c52b0cbdde0c15d799dc68288f4ee14a18c067573b81f96b3508b94432"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.961426 5003 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"23f16ba6e5ba6a363d0ee9ae965d1b10aa7a42784a6480cb45492d47033a7ba8"}
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.959185 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-1"
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.969220 5003 scope.go:117] "RemoveContainer" containerID="7b458a16e007ce6f0cee91e56dbf993488bc1c68a09fe8963fde8a000508741b"
Jan 26 11:05:20 crc kubenswrapper[5003]: I0126 11:05:20.985473 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-2"]
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.033791 5003 scope.go:117] "RemoveContainer" containerID="f12fcca496d3abd1c909b9cf17dd411cfdd166e83214f3c1c87091761b52a212"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.043169 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-storage-2"]
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.043511 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-0"]
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.043554 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-storage-0"]
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.066624 5003 scope.go:117] "RemoveContainer" containerID="162e1b56b56be0a20546eddac9ef658defb1efdb1e957f4ec5c5844d879ac13b"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.067398 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-1"]
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.071828 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-storage-1"]
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.090539 5003 scope.go:117] "RemoveContainer" containerID="f8814d625f7117c51d0aec533953e4fa6b9b8b683beab027deb63801d292ffb2"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.105910 5003 scope.go:117] "RemoveContainer" containerID="a10effcebd5f35f3664ee774bfb75160c87673f9f08ab86e1ccd324075153ba4"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.124802 5003 scope.go:117] "RemoveContainer" containerID="ce19ee917cb68ccf5d427cd056c695990454baa0d29ce04a1105dbd984274d51"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.168843 5003 scope.go:117] "RemoveContainer" containerID="73e8ebe063e7176c37d9fc88e20295c086ec115cb05b91434693bd33f1f49da2"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.185008 5003 scope.go:117] "RemoveContainer" containerID="8640f61b9595e8cc65355db36b00c634c8f37f6bedb38c5576effa879d69cf8f"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.203562 5003 scope.go:117] "RemoveContainer" containerID="8d6c329a9bc40fbf978e8c86849f93750f8ea4257e5419fc4efd17dfa1c42ba4"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.221601 5003 scope.go:117] "RemoveContainer" containerID="9a891062f08c6042aebdb2b82937f07a228467b3ed7709963fb06d4fa378eef3"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.236932 5003 scope.go:117] "RemoveContainer" containerID="76c91568fed3d3be7b0d221f08c5227712a61135c9639cbdc2b07a9d4d24752e"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.257599 5003 scope.go:117] "RemoveContainer" containerID="4384daada70a6c99d7a2ffb160ce6ab354e3942cd7ba8daebd42308333183a53"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.274474 5003 scope.go:117] "RemoveContainer" containerID="5e93c0b13111352b01ab24a7699b92113b6bf3ebf4cf55453135ded7ea36f320"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.293337 5003 scope.go:117] "RemoveContainer" containerID="fa643b20c3314cb09c04e05aa736116433ac48b341a87f5e0541497454d90ae1"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.309757 5003 scope.go:117] "RemoveContainer" containerID="6875290abeab157e08486b92663f5ae51d75ce9c4d1edff0a97352af62aa9258"
Jan 26 11:05:21 crc kubenswrapper[5003]: E0126 11:05:21.310473 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6875290abeab157e08486b92663f5ae51d75ce9c4d1edff0a97352af62aa9258\": container with ID starting with 6875290abeab157e08486b92663f5ae51d75ce9c4d1edff0a97352af62aa9258 not found: ID does not exist" containerID="6875290abeab157e08486b92663f5ae51d75ce9c4d1edff0a97352af62aa9258"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.310534 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6875290abeab157e08486b92663f5ae51d75ce9c4d1edff0a97352af62aa9258"} err="failed to get container status \"6875290abeab157e08486b92663f5ae51d75ce9c4d1edff0a97352af62aa9258\": rpc error: code = NotFound desc = could not find container \"6875290abeab157e08486b92663f5ae51d75ce9c4d1edff0a97352af62aa9258\": container with ID starting with 6875290abeab157e08486b92663f5ae51d75ce9c4d1edff0a97352af62aa9258 not found: ID does not exist"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.310568 5003 scope.go:117] "RemoveContainer" containerID="7b458a16e007ce6f0cee91e56dbf993488bc1c68a09fe8963fde8a000508741b"
Jan 26 11:05:21 crc kubenswrapper[5003]: E0126 11:05:21.311796 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b458a16e007ce6f0cee91e56dbf993488bc1c68a09fe8963fde8a000508741b\": container with ID starting with 7b458a16e007ce6f0cee91e56dbf993488bc1c68a09fe8963fde8a000508741b not found: ID does not exist" containerID="7b458a16e007ce6f0cee91e56dbf993488bc1c68a09fe8963fde8a000508741b"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.311842 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b458a16e007ce6f0cee91e56dbf993488bc1c68a09fe8963fde8a000508741b"} err="failed to get container status \"7b458a16e007ce6f0cee91e56dbf993488bc1c68a09fe8963fde8a000508741b\": rpc error: code = NotFound desc = could not find container \"7b458a16e007ce6f0cee91e56dbf993488bc1c68a09fe8963fde8a000508741b\": container with ID starting with 7b458a16e007ce6f0cee91e56dbf993488bc1c68a09fe8963fde8a000508741b not found: ID does not exist"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.311929 5003 scope.go:117] "RemoveContainer" containerID="f12fcca496d3abd1c909b9cf17dd411cfdd166e83214f3c1c87091761b52a212"
Jan 26 11:05:21 crc kubenswrapper[5003]: E0126 11:05:21.312490 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f12fcca496d3abd1c909b9cf17dd411cfdd166e83214f3c1c87091761b52a212\": container with ID starting with f12fcca496d3abd1c909b9cf17dd411cfdd166e83214f3c1c87091761b52a212 not found: ID does not exist" containerID="f12fcca496d3abd1c909b9cf17dd411cfdd166e83214f3c1c87091761b52a212"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.312521 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f12fcca496d3abd1c909b9cf17dd411cfdd166e83214f3c1c87091761b52a212"} err="failed to get container status \"f12fcca496d3abd1c909b9cf17dd411cfdd166e83214f3c1c87091761b52a212\": rpc error: code = NotFound desc = could not find container \"f12fcca496d3abd1c909b9cf17dd411cfdd166e83214f3c1c87091761b52a212\": container with ID starting with f12fcca496d3abd1c909b9cf17dd411cfdd166e83214f3c1c87091761b52a212 not found: ID does not exist"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.312541 5003 scope.go:117] "RemoveContainer" containerID="162e1b56b56be0a20546eddac9ef658defb1efdb1e957f4ec5c5844d879ac13b"
Jan 26 11:05:21 crc kubenswrapper[5003]: E0126 11:05:21.314924 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"162e1b56b56be0a20546eddac9ef658defb1efdb1e957f4ec5c5844d879ac13b\": container with ID starting with 162e1b56b56be0a20546eddac9ef658defb1efdb1e957f4ec5c5844d879ac13b not found: ID does not exist" containerID="162e1b56b56be0a20546eddac9ef658defb1efdb1e957f4ec5c5844d879ac13b"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.314989 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"162e1b56b56be0a20546eddac9ef658defb1efdb1e957f4ec5c5844d879ac13b"} err="failed to get container status \"162e1b56b56be0a20546eddac9ef658defb1efdb1e957f4ec5c5844d879ac13b\": rpc error: code = NotFound desc = could not find container \"162e1b56b56be0a20546eddac9ef658defb1efdb1e957f4ec5c5844d879ac13b\": container with ID starting with 162e1b56b56be0a20546eddac9ef658defb1efdb1e957f4ec5c5844d879ac13b not found: ID does not exist"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.315021 5003 scope.go:117] "RemoveContainer" containerID="f8814d625f7117c51d0aec533953e4fa6b9b8b683beab027deb63801d292ffb2"
Jan 26 11:05:21 crc kubenswrapper[5003]: E0126 11:05:21.315388 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8814d625f7117c51d0aec533953e4fa6b9b8b683beab027deb63801d292ffb2\": container with ID starting with f8814d625f7117c51d0aec533953e4fa6b9b8b683beab027deb63801d292ffb2 not found: ID does not exist" containerID="f8814d625f7117c51d0aec533953e4fa6b9b8b683beab027deb63801d292ffb2"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.315413 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8814d625f7117c51d0aec533953e4fa6b9b8b683beab027deb63801d292ffb2"} err="failed to get container status \"f8814d625f7117c51d0aec533953e4fa6b9b8b683beab027deb63801d292ffb2\": rpc error: code = NotFound desc = could not find container \"f8814d625f7117c51d0aec533953e4fa6b9b8b683beab027deb63801d292ffb2\": container with ID starting with f8814d625f7117c51d0aec533953e4fa6b9b8b683beab027deb63801d292ffb2 not found: ID does not exist"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.315427 5003 scope.go:117] "RemoveContainer" containerID="a10effcebd5f35f3664ee774bfb75160c87673f9f08ab86e1ccd324075153ba4"
Jan 26 11:05:21 crc kubenswrapper[5003]: E0126 11:05:21.315666 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a10effcebd5f35f3664ee774bfb75160c87673f9f08ab86e1ccd324075153ba4\": container with ID starting with a10effcebd5f35f3664ee774bfb75160c87673f9f08ab86e1ccd324075153ba4 not found: ID does not exist" containerID="a10effcebd5f35f3664ee774bfb75160c87673f9f08ab86e1ccd324075153ba4"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.315722 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a10effcebd5f35f3664ee774bfb75160c87673f9f08ab86e1ccd324075153ba4"} err="failed to get container status \"a10effcebd5f35f3664ee774bfb75160c87673f9f08ab86e1ccd324075153ba4\": rpc error: code = NotFound desc = could not find container \"a10effcebd5f35f3664ee774bfb75160c87673f9f08ab86e1ccd324075153ba4\": container with ID starting with a10effcebd5f35f3664ee774bfb75160c87673f9f08ab86e1ccd324075153ba4 not found: ID does not exist"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.315735 5003 scope.go:117] "RemoveContainer" containerID="ce19ee917cb68ccf5d427cd056c695990454baa0d29ce04a1105dbd984274d51"
Jan 26 11:05:21 crc kubenswrapper[5003]: E0126 11:05:21.315957 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce19ee917cb68ccf5d427cd056c695990454baa0d29ce04a1105dbd984274d51\": container with ID starting with ce19ee917cb68ccf5d427cd056c695990454baa0d29ce04a1105dbd984274d51 not found: ID does not exist" containerID="ce19ee917cb68ccf5d427cd056c695990454baa0d29ce04a1105dbd984274d51"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.315976 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce19ee917cb68ccf5d427cd056c695990454baa0d29ce04a1105dbd984274d51"} err="failed to get container status \"ce19ee917cb68ccf5d427cd056c695990454baa0d29ce04a1105dbd984274d51\": rpc error: code = NotFound desc = could not find container \"ce19ee917cb68ccf5d427cd056c695990454baa0d29ce04a1105dbd984274d51\": container with ID starting with ce19ee917cb68ccf5d427cd056c695990454baa0d29ce04a1105dbd984274d51 not found: ID does not exist"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.315989 5003 scope.go:117] "RemoveContainer" containerID="73e8ebe063e7176c37d9fc88e20295c086ec115cb05b91434693bd33f1f49da2"
Jan 26 11:05:21 crc kubenswrapper[5003]: E0126 11:05:21.316222 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"73e8ebe063e7176c37d9fc88e20295c086ec115cb05b91434693bd33f1f49da2\": container with ID starting with 73e8ebe063e7176c37d9fc88e20295c086ec115cb05b91434693bd33f1f49da2 not found: ID does not exist" containerID="73e8ebe063e7176c37d9fc88e20295c086ec115cb05b91434693bd33f1f49da2"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.316252 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"73e8ebe063e7176c37d9fc88e20295c086ec115cb05b91434693bd33f1f49da2"} err="failed to get container status \"73e8ebe063e7176c37d9fc88e20295c086ec115cb05b91434693bd33f1f49da2\": rpc error: code = NotFound desc = could not find container \"73e8ebe063e7176c37d9fc88e20295c086ec115cb05b91434693bd33f1f49da2\": container with ID starting with 73e8ebe063e7176c37d9fc88e20295c086ec115cb05b91434693bd33f1f49da2 not found: ID does not exist"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.316264 5003 scope.go:117] "RemoveContainer" containerID="8640f61b9595e8cc65355db36b00c634c8f37f6bedb38c5576effa879d69cf8f"
Jan 26 11:05:21 crc kubenswrapper[5003]: E0126 11:05:21.316521 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8640f61b9595e8cc65355db36b00c634c8f37f6bedb38c5576effa879d69cf8f\": container with ID starting with 8640f61b9595e8cc65355db36b00c634c8f37f6bedb38c5576effa879d69cf8f not found: ID does not exist" containerID="8640f61b9595e8cc65355db36b00c634c8f37f6bedb38c5576effa879d69cf8f"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.316555 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8640f61b9595e8cc65355db36b00c634c8f37f6bedb38c5576effa879d69cf8f"} err="failed to get container status \"8640f61b9595e8cc65355db36b00c634c8f37f6bedb38c5576effa879d69cf8f\": rpc error: code = NotFound desc = could not find container \"8640f61b9595e8cc65355db36b00c634c8f37f6bedb38c5576effa879d69cf8f\": container with ID starting with 8640f61b9595e8cc65355db36b00c634c8f37f6bedb38c5576effa879d69cf8f not found: ID does not exist"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.316576 5003 scope.go:117] "RemoveContainer" containerID="8d6c329a9bc40fbf978e8c86849f93750f8ea4257e5419fc4efd17dfa1c42ba4"
Jan 26 11:05:21 crc kubenswrapper[5003]: E0126 11:05:21.316850 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d6c329a9bc40fbf978e8c86849f93750f8ea4257e5419fc4efd17dfa1c42ba4\": container with ID starting with 8d6c329a9bc40fbf978e8c86849f93750f8ea4257e5419fc4efd17dfa1c42ba4 not found: ID does not exist" containerID="8d6c329a9bc40fbf978e8c86849f93750f8ea4257e5419fc4efd17dfa1c42ba4"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.316894 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d6c329a9bc40fbf978e8c86849f93750f8ea4257e5419fc4efd17dfa1c42ba4"} err="failed to get container status \"8d6c329a9bc40fbf978e8c86849f93750f8ea4257e5419fc4efd17dfa1c42ba4\": rpc error: code = NotFound desc = could not find container \"8d6c329a9bc40fbf978e8c86849f93750f8ea4257e5419fc4efd17dfa1c42ba4\": container with ID starting with 8d6c329a9bc40fbf978e8c86849f93750f8ea4257e5419fc4efd17dfa1c42ba4 not found: ID does not exist"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.316923 5003 scope.go:117] "RemoveContainer" containerID="9a891062f08c6042aebdb2b82937f07a228467b3ed7709963fb06d4fa378eef3"
Jan 26 11:05:21 crc kubenswrapper[5003]: E0126 11:05:21.317200 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a891062f08c6042aebdb2b82937f07a228467b3ed7709963fb06d4fa378eef3\": container with ID starting with 9a891062f08c6042aebdb2b82937f07a228467b3ed7709963fb06d4fa378eef3 not found: ID does not exist" containerID="9a891062f08c6042aebdb2b82937f07a228467b3ed7709963fb06d4fa378eef3"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.317228 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a891062f08c6042aebdb2b82937f07a228467b3ed7709963fb06d4fa378eef3"} err="failed to get container status \"9a891062f08c6042aebdb2b82937f07a228467b3ed7709963fb06d4fa378eef3\": rpc error: code = NotFound desc = could not find container \"9a891062f08c6042aebdb2b82937f07a228467b3ed7709963fb06d4fa378eef3\": container with ID starting with 9a891062f08c6042aebdb2b82937f07a228467b3ed7709963fb06d4fa378eef3 not found: ID does not exist"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.317245 5003 scope.go:117] "RemoveContainer" containerID="76c91568fed3d3be7b0d221f08c5227712a61135c9639cbdc2b07a9d4d24752e"
Jan 26 11:05:21 crc kubenswrapper[5003]: E0126 11:05:21.317494 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76c91568fed3d3be7b0d221f08c5227712a61135c9639cbdc2b07a9d4d24752e\": container with ID starting with 76c91568fed3d3be7b0d221f08c5227712a61135c9639cbdc2b07a9d4d24752e not found: ID does not exist" containerID="76c91568fed3d3be7b0d221f08c5227712a61135c9639cbdc2b07a9d4d24752e"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.317517 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76c91568fed3d3be7b0d221f08c5227712a61135c9639cbdc2b07a9d4d24752e"} err="failed to get container status \"76c91568fed3d3be7b0d221f08c5227712a61135c9639cbdc2b07a9d4d24752e\": rpc error: code = NotFound desc = could not find container \"76c91568fed3d3be7b0d221f08c5227712a61135c9639cbdc2b07a9d4d24752e\": container with ID starting with 76c91568fed3d3be7b0d221f08c5227712a61135c9639cbdc2b07a9d4d24752e not found: ID does not exist"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.317531 5003 scope.go:117] "RemoveContainer" containerID="4384daada70a6c99d7a2ffb160ce6ab354e3942cd7ba8daebd42308333183a53"
Jan 26 11:05:21 crc kubenswrapper[5003]: E0126 11:05:21.317749 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4384daada70a6c99d7a2ffb160ce6ab354e3942cd7ba8daebd42308333183a53\": container with ID starting with 4384daada70a6c99d7a2ffb160ce6ab354e3942cd7ba8daebd42308333183a53 not found: ID does not exist" containerID="4384daada70a6c99d7a2ffb160ce6ab354e3942cd7ba8daebd42308333183a53"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.317775 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4384daada70a6c99d7a2ffb160ce6ab354e3942cd7ba8daebd42308333183a53"} err="failed to get container status \"4384daada70a6c99d7a2ffb160ce6ab354e3942cd7ba8daebd42308333183a53\": rpc error: code = NotFound desc = could not find container \"4384daada70a6c99d7a2ffb160ce6ab354e3942cd7ba8daebd42308333183a53\": container with ID starting with 4384daada70a6c99d7a2ffb160ce6ab354e3942cd7ba8daebd42308333183a53 not found: ID does not exist"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.317791 5003 scope.go:117] "RemoveContainer" containerID="5e93c0b13111352b01ab24a7699b92113b6bf3ebf4cf55453135ded7ea36f320"
Jan 26 11:05:21 crc kubenswrapper[5003]: E0126 11:05:21.318057 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e93c0b13111352b01ab24a7699b92113b6bf3ebf4cf55453135ded7ea36f320\": container with ID starting with 5e93c0b13111352b01ab24a7699b92113b6bf3ebf4cf55453135ded7ea36f320 not found: ID does not exist" containerID="5e93c0b13111352b01ab24a7699b92113b6bf3ebf4cf55453135ded7ea36f320"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.318080 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e93c0b13111352b01ab24a7699b92113b6bf3ebf4cf55453135ded7ea36f320"} err="failed to get container status \"5e93c0b13111352b01ab24a7699b92113b6bf3ebf4cf55453135ded7ea36f320\": rpc error: code = NotFound desc = could not find container \"5e93c0b13111352b01ab24a7699b92113b6bf3ebf4cf55453135ded7ea36f320\": container with ID starting with 5e93c0b13111352b01ab24a7699b92113b6bf3ebf4cf55453135ded7ea36f320 not found: ID does not exist"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.318095 5003 scope.go:117] "RemoveContainer" containerID="fa643b20c3314cb09c04e05aa736116433ac48b341a87f5e0541497454d90ae1"
Jan 26 11:05:21 crc kubenswrapper[5003]: E0126 11:05:21.318297 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa643b20c3314cb09c04e05aa736116433ac48b341a87f5e0541497454d90ae1\": container with ID starting with fa643b20c3314cb09c04e05aa736116433ac48b341a87f5e0541497454d90ae1 not found: ID does not exist" containerID="fa643b20c3314cb09c04e05aa736116433ac48b341a87f5e0541497454d90ae1"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.318315 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa643b20c3314cb09c04e05aa736116433ac48b341a87f5e0541497454d90ae1"} err="failed to get container status \"fa643b20c3314cb09c04e05aa736116433ac48b341a87f5e0541497454d90ae1\": rpc error: code = NotFound desc = could not find container \"fa643b20c3314cb09c04e05aa736116433ac48b341a87f5e0541497454d90ae1\": container with ID starting with fa643b20c3314cb09c04e05aa736116433ac48b341a87f5e0541497454d90ae1 not found: ID does not exist"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.318329 5003 scope.go:117] "RemoveContainer" containerID="b1a0e343cad805ae8c6408da7e1ec9232470d5e94d9a936e6615f7c3187c00c6"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.337422 5003 scope.go:117] "RemoveContainer" containerID="18e58aa23f76d39065cf7ec27c37a7736d755ff1746ffe71aa5eb4d936e040d8"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.353499 5003 scope.go:117] "RemoveContainer" containerID="07bd1aaf9672dd7b6a3a855467320342869a84bbfd503485f147fb074f7b62da"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.367998 5003 scope.go:117] "RemoveContainer" containerID="bed31d1f568e801654716e146fb9ddb44ea521879275a28f8dde39238b54ba0a"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.383620 5003 scope.go:117] "RemoveContainer" containerID="c44a317d8182394990b6b241828022a2a0a8d9496dc2292f96a1cc72b55ade51"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.403818 5003 scope.go:117] "RemoveContainer" containerID="178bd810c5348d96a9efd14bc38890c354eeb36dd59dc06f183129fe85ea1d5f"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.421226 5003 scope.go:117] "RemoveContainer" containerID="aa3f16901f9fac2eeb4f0e98481575cc714ec4e56ed459a337209d6cae3ac9bd"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.439088 5003 scope.go:117] "RemoveContainer" containerID="4c70dc4068dca5aa538646b5a5cf67b9678e3c5da2f2b07c409ac6f6c846c7e7"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.461429 5003 scope.go:117] "RemoveContainer" containerID="dcf0b5ce28344896d5ad2a9cb5419af7078cf89fa9ce1120a5322d2a61e9b335"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.478166 5003 scope.go:117] "RemoveContainer" containerID="e240db621933c2cd325c34006ef2c68ab817226ffc8733b8fcedc9e3f09f14fd"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.494154 5003 scope.go:117] "RemoveContainer" containerID="2fc3c87be59a5ec7ba8a74b787b40113f9389d112d941fb7c2ed672c43b2edfd"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.517921 5003 scope.go:117] "RemoveContainer" containerID="8223fc509969f6776b993e8185f9b069d5c9f086609a260402a84d4770684a11"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.534148 5003 scope.go:117] "RemoveContainer" containerID="4ad9c01357ab348dfff940847ee95e8e56b30bd4d45d9f4e4af91de556a91141"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.556081 5003 scope.go:117] "RemoveContainer" containerID="cd3ec3973c0fdea5b314fe57cccfee96424716c43decd2b932dcb5a277f50475"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.571786 5003 scope.go:117] "RemoveContainer" containerID="cdbd7ccfbd2dda5c8e914cfc64d48a202c645b60787abb3380799caf918f44aa"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.589054 5003 scope.go:117] "RemoveContainer" containerID="b1a0e343cad805ae8c6408da7e1ec9232470d5e94d9a936e6615f7c3187c00c6"
Jan 26 11:05:21 crc kubenswrapper[5003]: E0126 11:05:21.589494 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1a0e343cad805ae8c6408da7e1ec9232470d5e94d9a936e6615f7c3187c00c6\": container with ID starting with b1a0e343cad805ae8c6408da7e1ec9232470d5e94d9a936e6615f7c3187c00c6 not found: ID does not exist" containerID="b1a0e343cad805ae8c6408da7e1ec9232470d5e94d9a936e6615f7c3187c00c6"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.589526 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1a0e343cad805ae8c6408da7e1ec9232470d5e94d9a936e6615f7c3187c00c6"} err="failed to get container status \"b1a0e343cad805ae8c6408da7e1ec9232470d5e94d9a936e6615f7c3187c00c6\": rpc error: code = NotFound desc = could not find container \"b1a0e343cad805ae8c6408da7e1ec9232470d5e94d9a936e6615f7c3187c00c6\": container with ID starting with b1a0e343cad805ae8c6408da7e1ec9232470d5e94d9a936e6615f7c3187c00c6 not found: ID does not exist"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.589544 5003 scope.go:117] "RemoveContainer" containerID="18e58aa23f76d39065cf7ec27c37a7736d755ff1746ffe71aa5eb4d936e040d8"
Jan 26 11:05:21 crc kubenswrapper[5003]: E0126 11:05:21.589783 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18e58aa23f76d39065cf7ec27c37a7736d755ff1746ffe71aa5eb4d936e040d8\": container with ID starting with 18e58aa23f76d39065cf7ec27c37a7736d755ff1746ffe71aa5eb4d936e040d8 not found: ID does not exist" containerID="18e58aa23f76d39065cf7ec27c37a7736d755ff1746ffe71aa5eb4d936e040d8"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.589806 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18e58aa23f76d39065cf7ec27c37a7736d755ff1746ffe71aa5eb4d936e040d8"} err="failed to get container status \"18e58aa23f76d39065cf7ec27c37a7736d755ff1746ffe71aa5eb4d936e040d8\": rpc error: code = NotFound desc = could not find container \"18e58aa23f76d39065cf7ec27c37a7736d755ff1746ffe71aa5eb4d936e040d8\": container with ID starting with 18e58aa23f76d39065cf7ec27c37a7736d755ff1746ffe71aa5eb4d936e040d8 not found: ID does not exist"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.589820 5003 scope.go:117] "RemoveContainer" containerID="07bd1aaf9672dd7b6a3a855467320342869a84bbfd503485f147fb074f7b62da"
Jan 26 11:05:21 crc kubenswrapper[5003]: E0126 11:05:21.590035 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07bd1aaf9672dd7b6a3a855467320342869a84bbfd503485f147fb074f7b62da\": container with ID starting with 07bd1aaf9672dd7b6a3a855467320342869a84bbfd503485f147fb074f7b62da not found: ID does not exist" containerID="07bd1aaf9672dd7b6a3a855467320342869a84bbfd503485f147fb074f7b62da"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.590056 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07bd1aaf9672dd7b6a3a855467320342869a84bbfd503485f147fb074f7b62da"} err="failed to get container status \"07bd1aaf9672dd7b6a3a855467320342869a84bbfd503485f147fb074f7b62da\": rpc error: code = NotFound desc = could not find container \"07bd1aaf9672dd7b6a3a855467320342869a84bbfd503485f147fb074f7b62da\": container with ID starting with 07bd1aaf9672dd7b6a3a855467320342869a84bbfd503485f147fb074f7b62da not found: ID does not exist"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.590071 5003 scope.go:117] "RemoveContainer" containerID="bed31d1f568e801654716e146fb9ddb44ea521879275a28f8dde39238b54ba0a"
Jan 26 11:05:21 crc kubenswrapper[5003]: E0126 11:05:21.590249 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bed31d1f568e801654716e146fb9ddb44ea521879275a28f8dde39238b54ba0a\": container with ID starting with bed31d1f568e801654716e146fb9ddb44ea521879275a28f8dde39238b54ba0a not found: ID does not exist" containerID="bed31d1f568e801654716e146fb9ddb44ea521879275a28f8dde39238b54ba0a"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.590274 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bed31d1f568e801654716e146fb9ddb44ea521879275a28f8dde39238b54ba0a"} err="failed to get container status \"bed31d1f568e801654716e146fb9ddb44ea521879275a28f8dde39238b54ba0a\": rpc error: code = NotFound desc = could not find container \"bed31d1f568e801654716e146fb9ddb44ea521879275a28f8dde39238b54ba0a\": container with ID starting with bed31d1f568e801654716e146fb9ddb44ea521879275a28f8dde39238b54ba0a not found: ID does not exist"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.590309 5003 scope.go:117] "RemoveContainer" containerID="c44a317d8182394990b6b241828022a2a0a8d9496dc2292f96a1cc72b55ade51"
Jan 26 11:05:21 crc kubenswrapper[5003]: E0126 11:05:21.590590 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c44a317d8182394990b6b241828022a2a0a8d9496dc2292f96a1cc72b55ade51\": container with ID starting with c44a317d8182394990b6b241828022a2a0a8d9496dc2292f96a1cc72b55ade51 not found: ID does not exist" containerID="c44a317d8182394990b6b241828022a2a0a8d9496dc2292f96a1cc72b55ade51"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.590616 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c44a317d8182394990b6b241828022a2a0a8d9496dc2292f96a1cc72b55ade51"} err="failed to get container status \"c44a317d8182394990b6b241828022a2a0a8d9496dc2292f96a1cc72b55ade51\": rpc error: code = NotFound desc = could not find container \"c44a317d8182394990b6b241828022a2a0a8d9496dc2292f96a1cc72b55ade51\": container with ID starting with c44a317d8182394990b6b241828022a2a0a8d9496dc2292f96a1cc72b55ade51 not found: ID does not exist"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.590630 5003 scope.go:117] "RemoveContainer" containerID="178bd810c5348d96a9efd14bc38890c354eeb36dd59dc06f183129fe85ea1d5f"
Jan 26 11:05:21 crc kubenswrapper[5003]: E0126 11:05:21.590899 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"178bd810c5348d96a9efd14bc38890c354eeb36dd59dc06f183129fe85ea1d5f\": container with ID starting with 178bd810c5348d96a9efd14bc38890c354eeb36dd59dc06f183129fe85ea1d5f not found: ID does not exist" containerID="178bd810c5348d96a9efd14bc38890c354eeb36dd59dc06f183129fe85ea1d5f"
Jan 26 11:05:21 crc kubenswrapper[5003]: I0126 11:05:21.590932 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"178bd810c5348d96a9efd14bc38890c354eeb36dd59dc06f183129fe85ea1d5f"} err="failed to get container status \"178bd810c5348d96a9efd14bc38890c354eeb36dd59dc06f183129fe85ea1d5f\": rpc error: code = NotFound desc = could not find container \"178bd810c5348d96a9efd14bc38890c354eeb36dd59dc06f183129fe85ea1d5f\": container with ID starting with 178bd810c5348d96a9efd14bc38890c354eeb36dd59dc06f183129fe85ea1d5f not found: ID does not exist"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.012490 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" path="/var/lib/kubelet/pods/510b1209-97a2-4a4c-bc62-60d5c9ce6bcd/volumes"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.015630 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85e899d6-b28a-4b65-bbed-90648be93627" path="/var/lib/kubelet/pods/85e899d6-b28a-4b65-bbed-90648be93627/volumes"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.017588 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" path="/var/lib/kubelet/pods/f66de5f0-1dc9-497e-828e-563484b9f60e/volumes"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.895148 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-storage-0"]
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.895837 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="rsync"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.895929 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="rsync"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.896021 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="account-replicator"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.896092 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="account-replicator"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.896165 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="168289f3-76bd-4518-8672-b02c64df8a27" containerName="proxy-httpd"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.896264 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="168289f3-76bd-4518-8672-b02c64df8a27" containerName="proxy-httpd"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.896369 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="object-updater"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.896447 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="object-updater"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.896522 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="168289f3-76bd-4518-8672-b02c64df8a27" containerName="proxy-server"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.896599 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="168289f3-76bd-4518-8672-b02c64df8a27" containerName="proxy-server"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.896669 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="container-updater"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.896742 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="container-updater"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.896816 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="object-expirer"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.896895 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="object-expirer"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.896970 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="container-replicator"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.897036 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="container-replicator"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.897108 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="object-auditor"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.897179 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="object-auditor"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.897255 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="object-updater"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.897414 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="object-updater"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.897502 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="container-server"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.897617 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="container-server"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.897707 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="rsync"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.897790 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="rsync"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.897869 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="account-reaper"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.897964 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="account-reaper"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.898050 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="object-auditor"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.898132 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="object-auditor"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.898276 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="object-replicator"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.898388 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="object-replicator"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.898472 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="object-replicator"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.898553 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="object-replicator"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.898636 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="account-server"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.898706 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="account-server"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.898781 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="container-server"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.898895 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="container-server"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.898982 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="object-auditor"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.899050 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="object-auditor"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.899243 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="object-server"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.899338 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="object-server"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.899417 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="object-expirer"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.899484 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="object-expirer"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.899576 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="account-server"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.899646 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="account-server"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.899721 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="account-reaper"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.899793 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="account-reaper"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.899867 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d303cb3-63f2-4130-a35f-000b31b5d414" containerName="swift-ring-rebalance"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.899934 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d303cb3-63f2-4130-a35f-000b31b5d414" containerName="swift-ring-rebalance"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.900005 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="account-auditor"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.900075 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="account-auditor"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.900147 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="swift-recon-cron"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.900248 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="swift-recon-cron"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.900345 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="account-replicator"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.900435 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="account-replicator"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.900505 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="container-replicator"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.900572 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="container-replicator"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.900641 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="object-server"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.900715 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="object-server"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.900778 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="account-replicator"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.900847 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="account-replicator"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.900913 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="object-replicator"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.900989 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="object-replicator"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.901055 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="container-updater"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.901127 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="container-updater"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.901217 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="account-server"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.901315 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="account-server"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.901389 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="account-reaper"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.901456 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="account-reaper"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.901523 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="container-server"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.901599 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="container-server"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.901666 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="swift-recon-cron"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.901725 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="swift-recon-cron"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.901799 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="object-server"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.901864 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="object-server"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.901959 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="object-updater"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.902038 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="object-updater"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.902143 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="container-replicator"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.902217 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="container-replicator"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.902302 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="account-auditor"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.902371 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="account-auditor"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.902446 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="container-updater"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.902516 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="container-updater"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.902597 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="object-expirer"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.902678 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="object-expirer"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.902750 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="swift-recon-cron"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.902822 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="swift-recon-cron"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.902896 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="container-auditor"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.902967 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="container-auditor"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.903048 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="account-auditor"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.903114 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="account-auditor"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.903187 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="container-auditor"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.903253 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="container-auditor"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.903341 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="container-auditor"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.903422 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="container-auditor"
Jan 26 11:05:23 crc kubenswrapper[5003]: E0126 11:05:23.903512 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="rsync"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.903582 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="rsync"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.903931 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="168289f3-76bd-4518-8672-b02c64df8a27" containerName="proxy-server"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.904014 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d303cb3-63f2-4130-a35f-000b31b5d414" containerName="swift-ring-rebalance"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.904087 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="container-server"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.904473 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="container-updater"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.904557 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="swift-recon-cron"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.904620 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="account-reaper"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.904679 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="168289f3-76bd-4518-8672-b02c64df8a27" containerName="proxy-httpd"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.904739 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="container-replicator"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.904802 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="account-reaper"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.904854 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="account-server"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.904905 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="swift-recon-cron"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.904961 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="account-server"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.905016 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="object-expirer"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.905073 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="rsync"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.905132 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="object-server"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.905185 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="account-replicator"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.905235 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="account-replicator"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.905315 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="container-server"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.905399 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="object-auditor"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.905476 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="object-replicator"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.905547 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="object-server"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.905628 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="container-replicator"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.905687 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="object-replicator"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.905737 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="object-updater"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.905800 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="container-server"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.905859 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="rsync"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.905911 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="container-updater"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.905959 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="account-reaper"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.906014 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="account-server"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.906066 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="object-server"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.906134 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="container-updater"
Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.906209 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="container-auditor" Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.906264 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="object-expirer" Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.906350 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="rsync" Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.906406 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="object-auditor" Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.906457 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="object-auditor" Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.906519 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="swift-recon-cron" Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.906571 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="object-updater" Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.906625 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="object-updater" Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.906676 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="object-expirer" Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.906726 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="container-auditor" Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.906781 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="object-replicator" Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.906855 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="account-auditor" Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.906934 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="container-replicator" Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.907003 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="510b1209-97a2-4a4c-bc62-60d5c9ce6bcd" containerName="account-auditor" Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.907057 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="container-auditor" Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.907114 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="85e899d6-b28a-4b65-bbed-90648be93627" containerName="account-replicator" Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.907168 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f66de5f0-1dc9-497e-828e-563484b9f60e" containerName="account-auditor" Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.912425 5003 util.go:30] "No sandbox for pod 
can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.914965 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-files" Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.914983 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-swift-dockercfg-9j7qz" Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.915658 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-storage-config-data" Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.915830 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-conf" Jan 26 11:05:23 crc kubenswrapper[5003]: I0126 11:05:23.919134 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.039745 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"dbc6d977-1883-4a60-9768-39fe20a4c4ed\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.039807 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/dbc6d977-1883-4a60-9768-39fe20a4c4ed-etc-swift\") pod \"swift-storage-0\" (UID: \"dbc6d977-1883-4a60-9768-39fe20a4c4ed\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.039829 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7sbx\" (UniqueName: \"kubernetes.io/projected/dbc6d977-1883-4a60-9768-39fe20a4c4ed-kube-api-access-b7sbx\") pod \"swift-storage-0\" (UID: \"dbc6d977-1883-4a60-9768-39fe20a4c4ed\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.039864 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/dbc6d977-1883-4a60-9768-39fe20a4c4ed-cache\") pod \"swift-storage-0\" (UID: \"dbc6d977-1883-4a60-9768-39fe20a4c4ed\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.039953 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/dbc6d977-1883-4a60-9768-39fe20a4c4ed-lock\") pod \"swift-storage-0\" (UID: \"dbc6d977-1883-4a60-9768-39fe20a4c4ed\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.125464 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg"] Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.126853 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg" Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.133079 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-proxy-config-data" Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.136016 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg"] Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.141192 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7sbx\" (UniqueName: \"kubernetes.io/projected/dbc6d977-1883-4a60-9768-39fe20a4c4ed-kube-api-access-b7sbx\") pod \"swift-storage-0\" (UID: \"dbc6d977-1883-4a60-9768-39fe20a4c4ed\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.141239 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/dbc6d977-1883-4a60-9768-39fe20a4c4ed-etc-swift\") pod \"swift-storage-0\" (UID: \"dbc6d977-1883-4a60-9768-39fe20a4c4ed\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.141302 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/dbc6d977-1883-4a60-9768-39fe20a4c4ed-cache\") pod \"swift-storage-0\" (UID: \"dbc6d977-1883-4a60-9768-39fe20a4c4ed\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.141370 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/dbc6d977-1883-4a60-9768-39fe20a4c4ed-lock\") pod \"swift-storage-0\" (UID: \"dbc6d977-1883-4a60-9768-39fe20a4c4ed\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.141432 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"dbc6d977-1883-4a60-9768-39fe20a4c4ed\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.141778 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"dbc6d977-1883-4a60-9768-39fe20a4c4ed\") device mount path \"/mnt/openstack/pv09\"" pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:05:24 crc kubenswrapper[5003]: E0126 11:05:24.142024 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:05:24 crc kubenswrapper[5003]: E0126 11:05:24.142051 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 26 11:05:24 crc kubenswrapper[5003]: E0126 11:05:24.142107 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/dbc6d977-1883-4a60-9768-39fe20a4c4ed-etc-swift podName:dbc6d977-1883-4a60-9768-39fe20a4c4ed nodeName:}" failed. No retries permitted until 2026-01-26 11:05:24.642087981 +0000 UTC m=+1340.183313542 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/dbc6d977-1883-4a60-9768-39fe20a4c4ed-etc-swift") pod "swift-storage-0" (UID: "dbc6d977-1883-4a60-9768-39fe20a4c4ed") : configmap "swift-ring-files" not found
Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.142194 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/dbc6d977-1883-4a60-9768-39fe20a4c4ed-cache\") pod \"swift-storage-0\" (UID: \"dbc6d977-1883-4a60-9768-39fe20a4c4ed\") " pod="swift-kuttl-tests/swift-storage-0"
Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.142240 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/dbc6d977-1883-4a60-9768-39fe20a4c4ed-lock\") pod \"swift-storage-0\" (UID: \"dbc6d977-1883-4a60-9768-39fe20a4c4ed\") " pod="swift-kuttl-tests/swift-storage-0"
Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.164903 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7sbx\" (UniqueName: \"kubernetes.io/projected/dbc6d977-1883-4a60-9768-39fe20a4c4ed-kube-api-access-b7sbx\") pod \"swift-storage-0\" (UID: \"dbc6d977-1883-4a60-9768-39fe20a4c4ed\") " pod="swift-kuttl-tests/swift-storage-0"
Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.165309 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"dbc6d977-1883-4a60-9768-39fe20a4c4ed\") " pod="swift-kuttl-tests/swift-storage-0"
Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.242315 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-config-data\") pod \"swift-proxy-6bb4649ff-slvzg\" (UID: \"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694\") " pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg"
Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.242378 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fh4gv\" (UniqueName: \"kubernetes.io/projected/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-kube-api-access-fh4gv\") pod \"swift-proxy-6bb4649ff-slvzg\" (UID: \"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694\") " pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg"
Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.242422 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-etc-swift\") pod \"swift-proxy-6bb4649ff-slvzg\" (UID: \"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694\") " pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg"
Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.242443 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-run-httpd\") pod \"swift-proxy-6bb4649ff-slvzg\" (UID: \"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694\") " pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg"
Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.242504 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-log-httpd\") pod \"swift-proxy-6bb4649ff-slvzg\" (UID: \"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694\") " pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg"
Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.344444 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-config-data\") pod \"swift-proxy-6bb4649ff-slvzg\" (UID: \"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694\") " pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg"
Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.344524 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fh4gv\" (UniqueName: \"kubernetes.io/projected/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-kube-api-access-fh4gv\") pod \"swift-proxy-6bb4649ff-slvzg\" (UID: \"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694\") " pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg"
Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.344583 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-etc-swift\") pod \"swift-proxy-6bb4649ff-slvzg\" (UID: \"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694\") " pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg"
Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.344637 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-run-httpd\") pod \"swift-proxy-6bb4649ff-slvzg\" (UID: \"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694\") " pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg"
Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.344693 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-log-httpd\") pod \"swift-proxy-6bb4649ff-slvzg\" (UID: \"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694\") " pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg"
Jan 26 11:05:24 crc kubenswrapper[5003]: E0126 11:05:24.344827 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found
Jan 26 11:05:24 crc kubenswrapper[5003]: E0126 11:05:24.344881 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg: configmap "swift-ring-files" not found
Jan 26 11:05:24 crc kubenswrapper[5003]: E0126 11:05:24.344940 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-etc-swift podName:2bc7f0bc-22bd-427a-87b9-d00ea1c2f694 nodeName:}" failed. No retries permitted until 2026-01-26 11:05:24.844920402 +0000 UTC m=+1340.386145963 (durationBeforeRetry 500ms).
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-etc-swift") pod "swift-proxy-6bb4649ff-slvzg" (UID: "2bc7f0bc-22bd-427a-87b9-d00ea1c2f694") : configmap "swift-ring-files" not found
Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.345253 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-log-httpd\") pod \"swift-proxy-6bb4649ff-slvzg\" (UID: \"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694\") " pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg"
Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.345341 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-run-httpd\") pod \"swift-proxy-6bb4649ff-slvzg\" (UID: \"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694\") " pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg"
Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.357996 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-config-data\") pod \"swift-proxy-6bb4649ff-slvzg\" (UID: \"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694\") " pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg"
Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.367513 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fh4gv\" (UniqueName: \"kubernetes.io/projected/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-kube-api-access-fh4gv\") pod \"swift-proxy-6bb4649ff-slvzg\" (UID: \"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694\") " pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg"
Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.649304 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/dbc6d977-1883-4a60-9768-39fe20a4c4ed-etc-swift\") pod \"swift-storage-0\" (UID: \"dbc6d977-1883-4a60-9768-39fe20a4c4ed\") " pod="swift-kuttl-tests/swift-storage-0"
Jan 26 11:05:24 crc kubenswrapper[5003]: E0126 11:05:24.649546 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found
Jan 26 11:05:24 crc kubenswrapper[5003]: E0126 11:05:24.649576 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found
Jan 26 11:05:24 crc kubenswrapper[5003]: E0126 11:05:24.649638 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/dbc6d977-1883-4a60-9768-39fe20a4c4ed-etc-swift podName:dbc6d977-1883-4a60-9768-39fe20a4c4ed nodeName:}" failed. No retries permitted until 2026-01-26 11:05:25.649618762 +0000 UTC m=+1341.190844323 (durationBeforeRetry 1s).
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/dbc6d977-1883-4a60-9768-39fe20a4c4ed-etc-swift") pod "swift-storage-0" (UID: "dbc6d977-1883-4a60-9768-39fe20a4c4ed") : configmap "swift-ring-files" not found
Jan 26 11:05:24 crc kubenswrapper[5003]: I0126 11:05:24.852138 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-etc-swift\") pod \"swift-proxy-6bb4649ff-slvzg\" (UID: \"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694\") " pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg"
Jan 26 11:05:24 crc kubenswrapper[5003]: E0126 11:05:24.852355 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found
Jan 26 11:05:24 crc kubenswrapper[5003]: E0126 11:05:24.852391 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg: configmap "swift-ring-files" not found
Jan 26 11:05:24 crc kubenswrapper[5003]: E0126 11:05:24.852450 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-etc-swift podName:2bc7f0bc-22bd-427a-87b9-d00ea1c2f694 nodeName:}" failed. No retries permitted until 2026-01-26 11:05:25.852432242 +0000 UTC m=+1341.393657803 (durationBeforeRetry 1s).
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-etc-swift") pod "swift-proxy-6bb4649ff-slvzg" (UID: "2bc7f0bc-22bd-427a-87b9-d00ea1c2f694") : configmap "swift-ring-files" not found
Jan 26 11:05:25 crc kubenswrapper[5003]: I0126 11:05:25.665642 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/dbc6d977-1883-4a60-9768-39fe20a4c4ed-etc-swift\") pod \"swift-storage-0\" (UID: \"dbc6d977-1883-4a60-9768-39fe20a4c4ed\") " pod="swift-kuttl-tests/swift-storage-0"
Jan 26 11:05:25 crc kubenswrapper[5003]: E0126 11:05:25.665881 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found
Jan 26 11:05:25 crc kubenswrapper[5003]: E0126 11:05:25.665917 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found
Jan 26 11:05:25 crc kubenswrapper[5003]: E0126 11:05:25.665972 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/dbc6d977-1883-4a60-9768-39fe20a4c4ed-etc-swift podName:dbc6d977-1883-4a60-9768-39fe20a4c4ed nodeName:}" failed. No retries permitted until 2026-01-26 11:05:27.665955309 +0000 UTC m=+1343.207180870 (durationBeforeRetry 2s).
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/dbc6d977-1883-4a60-9768-39fe20a4c4ed-etc-swift") pod "swift-storage-0" (UID: "dbc6d977-1883-4a60-9768-39fe20a4c4ed") : configmap "swift-ring-files" not found
Jan 26 11:05:25 crc kubenswrapper[5003]: I0126 11:05:25.868951 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-etc-swift\") pod \"swift-proxy-6bb4649ff-slvzg\" (UID: \"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694\") " pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg"
Jan 26 11:05:25 crc kubenswrapper[5003]: E0126 11:05:25.869144 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found
Jan 26 11:05:25 crc kubenswrapper[5003]: E0126 11:05:25.869169 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg: configmap "swift-ring-files" not found
Jan 26 11:05:25 crc kubenswrapper[5003]: E0126 11:05:25.869231 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-etc-swift podName:2bc7f0bc-22bd-427a-87b9-d00ea1c2f694 nodeName:}" failed. No retries permitted until 2026-01-26 11:05:27.869210113 +0000 UTC m=+1343.410435674 (durationBeforeRetry 2s).
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-etc-swift") pod "swift-proxy-6bb4649ff-slvzg" (UID: "2bc7f0bc-22bd-427a-87b9-d00ea1c2f694") : configmap "swift-ring-files" not found
Jan 26 11:05:27 crc kubenswrapper[5003]: I0126 11:05:27.697961 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/dbc6d977-1883-4a60-9768-39fe20a4c4ed-etc-swift\") pod \"swift-storage-0\" (UID: \"dbc6d977-1883-4a60-9768-39fe20a4c4ed\") " pod="swift-kuttl-tests/swift-storage-0"
Jan 26 11:05:27 crc kubenswrapper[5003]: E0126 11:05:27.698216 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found
Jan 26 11:05:27 crc kubenswrapper[5003]: E0126 11:05:27.698435 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found
Jan 26 11:05:27 crc kubenswrapper[5003]: E0126 11:05:27.698521 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/dbc6d977-1883-4a60-9768-39fe20a4c4ed-etc-swift podName:dbc6d977-1883-4a60-9768-39fe20a4c4ed nodeName:}" failed. No retries permitted until 2026-01-26 11:05:31.698494682 +0000 UTC m=+1347.239720273 (durationBeforeRetry 4s).
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/dbc6d977-1883-4a60-9768-39fe20a4c4ed-etc-swift") pod "swift-storage-0" (UID: "dbc6d977-1883-4a60-9768-39fe20a4c4ed") : configmap "swift-ring-files" not found
Jan 26 11:05:27 crc kubenswrapper[5003]: I0126 11:05:27.749329 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-s6xzf"]
Jan 26 11:05:27 crc kubenswrapper[5003]: I0126 11:05:27.750543 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-s6xzf"
Jan 26 11:05:27 crc kubenswrapper[5003]: I0126 11:05:27.752525 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-scripts"
Jan 26 11:05:27 crc kubenswrapper[5003]: I0126 11:05:27.753196 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-config-data"
Jan 26 11:05:27 crc kubenswrapper[5003]: I0126 11:05:27.759553 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-s6xzf"]
Jan 26 11:05:27 crc kubenswrapper[5003]: I0126 11:05:27.901665 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b8e3ebf8-f546-4806-a82d-bca78c5af057-scripts\") pod \"swift-ring-rebalance-s6xzf\" (UID: \"b8e3ebf8-f546-4806-a82d-bca78c5af057\") " pod="swift-kuttl-tests/swift-ring-rebalance-s6xzf"
Jan 26 11:05:27 crc kubenswrapper[5003]: I0126 11:05:27.901747 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b8e3ebf8-f546-4806-a82d-bca78c5af057-etc-swift\") pod \"swift-ring-rebalance-s6xzf\" (UID: \"b8e3ebf8-f546-4806-a82d-bca78c5af057\") " pod="swift-kuttl-tests/swift-ring-rebalance-s6xzf"
Jan 26 11:05:27 crc kubenswrapper[5003]: I0126 11:05:27.901779 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b8e3ebf8-f546-4806-a82d-bca78c5af057-ring-data-devices\") pod \"swift-ring-rebalance-s6xzf\" (UID: \"b8e3ebf8-f546-4806-a82d-bca78c5af057\") " pod="swift-kuttl-tests/swift-ring-rebalance-s6xzf"
Jan 26 11:05:27 crc kubenswrapper[5003]: I0126 11:05:27.901802 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b8e3ebf8-f546-4806-a82d-bca78c5af057-swiftconf\") pod \"swift-ring-rebalance-s6xzf\" (UID: \"b8e3ebf8-f546-4806-a82d-bca78c5af057\") " pod="swift-kuttl-tests/swift-ring-rebalance-s6xzf"
Jan 26 11:05:27 crc kubenswrapper[5003]: I0126 11:05:27.901830 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-etc-swift\") pod \"swift-proxy-6bb4649ff-slvzg\" (UID: \"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694\") " pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg"
Jan 26 11:05:27 crc kubenswrapper[5003]: E0126 11:05:27.902026 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found
Jan 26 11:05:27 crc kubenswrapper[5003]: E0126 11:05:27.902062 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg: configmap "swift-ring-files" not found
Jan 26 11:05:27 crc kubenswrapper[5003]: I0126 11:05:27.902036 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwxpn\" (UniqueName: \"kubernetes.io/projected/b8e3ebf8-f546-4806-a82d-bca78c5af057-kube-api-access-bwxpn\") pod \"swift-ring-rebalance-s6xzf\" (UID: \"b8e3ebf8-f546-4806-a82d-bca78c5af057\") " pod="swift-kuttl-tests/swift-ring-rebalance-s6xzf"
Jan 26 11:05:27 crc kubenswrapper[5003]: E0126 11:05:27.902131 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-etc-swift podName:2bc7f0bc-22bd-427a-87b9-d00ea1c2f694 nodeName:}" failed. No retries permitted until 2026-01-26 11:05:31.902105946 +0000 UTC m=+1347.443331507 (durationBeforeRetry 4s).
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-etc-swift") pod "swift-proxy-6bb4649ff-slvzg" (UID: "2bc7f0bc-22bd-427a-87b9-d00ea1c2f694") : configmap "swift-ring-files" not found
Jan 26 11:05:27 crc kubenswrapper[5003]: I0126 11:05:27.902190 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b8e3ebf8-f546-4806-a82d-bca78c5af057-dispersionconf\") pod \"swift-ring-rebalance-s6xzf\" (UID: \"b8e3ebf8-f546-4806-a82d-bca78c5af057\") " pod="swift-kuttl-tests/swift-ring-rebalance-s6xzf"
Jan 26 11:05:28 crc kubenswrapper[5003]: I0126 11:05:28.003115 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b8e3ebf8-f546-4806-a82d-bca78c5af057-etc-swift\") pod \"swift-ring-rebalance-s6xzf\" (UID: \"b8e3ebf8-f546-4806-a82d-bca78c5af057\") " pod="swift-kuttl-tests/swift-ring-rebalance-s6xzf"
Jan 26 11:05:28 crc kubenswrapper[5003]: I0126 11:05:28.003183 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b8e3ebf8-f546-4806-a82d-bca78c5af057-ring-data-devices\") pod \"swift-ring-rebalance-s6xzf\" (UID: \"b8e3ebf8-f546-4806-a82d-bca78c5af057\") " pod="swift-kuttl-tests/swift-ring-rebalance-s6xzf"
Jan 26 11:05:28 crc kubenswrapper[5003]: I0126 11:05:28.003216 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b8e3ebf8-f546-4806-a82d-bca78c5af057-swiftconf\") pod \"swift-ring-rebalance-s6xzf\" (UID: \"b8e3ebf8-f546-4806-a82d-bca78c5af057\") " pod="swift-kuttl-tests/swift-ring-rebalance-s6xzf"
Jan 26 11:05:28 crc kubenswrapper[5003]: I0126 11:05:28.003270 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwxpn\" (UniqueName: \"kubernetes.io/projected/b8e3ebf8-f546-4806-a82d-bca78c5af057-kube-api-access-bwxpn\") pod \"swift-ring-rebalance-s6xzf\" (UID: \"b8e3ebf8-f546-4806-a82d-bca78c5af057\") " pod="swift-kuttl-tests/swift-ring-rebalance-s6xzf"
Jan 26 11:05:28 crc kubenswrapper[5003]: I0126 11:05:28.003328 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b8e3ebf8-f546-4806-a82d-bca78c5af057-dispersionconf\") pod \"swift-ring-rebalance-s6xzf\" (UID: \"b8e3ebf8-f546-4806-a82d-bca78c5af057\") " pod="swift-kuttl-tests/swift-ring-rebalance-s6xzf"
Jan 26 11:05:28 crc kubenswrapper[5003]: I0126 11:05:28.003423 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b8e3ebf8-f546-4806-a82d-bca78c5af057-scripts\") pod \"swift-ring-rebalance-s6xzf\" (UID: \"b8e3ebf8-f546-4806-a82d-bca78c5af057\") " pod="swift-kuttl-tests/swift-ring-rebalance-s6xzf"
Jan 26 11:05:28 crc kubenswrapper[5003]: I0126 11:05:28.003827 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b8e3ebf8-f546-4806-a82d-bca78c5af057-etc-swift\") pod \"swift-ring-rebalance-s6xzf\" (UID: \"b8e3ebf8-f546-4806-a82d-bca78c5af057\") " pod="swift-kuttl-tests/swift-ring-rebalance-s6xzf"
Jan 26 11:05:28 crc kubenswrapper[5003]: I0126 11:05:28.004322 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b8e3ebf8-f546-4806-a82d-bca78c5af057-ring-data-devices\") pod \"swift-ring-rebalance-s6xzf\" (UID: \"b8e3ebf8-f546-4806-a82d-bca78c5af057\") " pod="swift-kuttl-tests/swift-ring-rebalance-s6xzf"
Jan 26 11:05:28 crc kubenswrapper[5003]: I0126 11:05:28.004522 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b8e3ebf8-f546-4806-a82d-bca78c5af057-scripts\") pod \"swift-ring-rebalance-s6xzf\" (UID: \"b8e3ebf8-f546-4806-a82d-bca78c5af057\") " pod="swift-kuttl-tests/swift-ring-rebalance-s6xzf"
Jan 26 11:05:28 crc kubenswrapper[5003]: I0126 11:05:28.009535 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b8e3ebf8-f546-4806-a82d-bca78c5af057-dispersionconf\") pod \"swift-ring-rebalance-s6xzf\" (UID: \"b8e3ebf8-f546-4806-a82d-bca78c5af057\") " pod="swift-kuttl-tests/swift-ring-rebalance-s6xzf"
Jan 26 11:05:28 crc kubenswrapper[5003]: I0126 11:05:28.013186 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b8e3ebf8-f546-4806-a82d-bca78c5af057-swiftconf\") pod \"swift-ring-rebalance-s6xzf\" (UID: \"b8e3ebf8-f546-4806-a82d-bca78c5af057\") " pod="swift-kuttl-tests/swift-ring-rebalance-s6xzf"
Jan 26 11:05:28 crc kubenswrapper[5003]: I0126 11:05:28.032953 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bwxpn\" (UniqueName: \"kubernetes.io/projected/b8e3ebf8-f546-4806-a82d-bca78c5af057-kube-api-access-bwxpn\") pod \"swift-ring-rebalance-s6xzf\" (UID: \"b8e3ebf8-f546-4806-a82d-bca78c5af057\") " pod="swift-kuttl-tests/swift-ring-rebalance-s6xzf"
Jan 26 11:05:28 crc kubenswrapper[5003]: I0126 11:05:28.083833 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-s6xzf"
Jan 26 11:05:28 crc kubenswrapper[5003]: I0126 11:05:28.497154 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-s6xzf"]
Jan 26 11:05:29 crc kubenswrapper[5003]: I0126 11:05:29.031929 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-s6xzf" event={"ID":"b8e3ebf8-f546-4806-a82d-bca78c5af057","Type":"ContainerStarted","Data":"d3e4b89d816b719481fc57de309d3c349994a5562e0428530b96dddee0f18332"}
Jan 26 11:05:29 crc kubenswrapper[5003]: I0126 11:05:29.032231 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-s6xzf" event={"ID":"b8e3ebf8-f546-4806-a82d-bca78c5af057","Type":"ContainerStarted","Data":"05859bfb745cc7ab328f64190ba16584fc5143891a10fb7cf71cee414797fc10"}
Jan 26 11:05:29 crc kubenswrapper[5003]: I0126 11:05:29.050599 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-ring-rebalance-s6xzf" podStartSLOduration=2.050579489 podStartE2EDuration="2.050579489s" podCreationTimestamp="2026-01-26 11:05:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 11:05:29.045214435 +0000 UTC m=+1344.586440026" watchObservedRunningTime="2026-01-26 11:05:29.050579489 +0000 UTC m=+1344.591805050"
Jan 26 11:05:31 crc kubenswrapper[5003]: I0126 11:05:31.758519 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/dbc6d977-1883-4a60-9768-39fe20a4c4ed-etc-swift\") pod \"swift-storage-0\" (UID: \"dbc6d977-1883-4a60-9768-39fe20a4c4ed\") " pod="swift-kuttl-tests/swift-storage-0"
Jan 26 11:05:31 crc kubenswrapper[5003]: E0126 11:05:31.758734 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found
Jan 26 11:05:31 crc kubenswrapper[5003]: E0126 11:05:31.759156 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found
Jan 26 11:05:31 crc kubenswrapper[5003]: E0126 11:05:31.759221 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/dbc6d977-1883-4a60-9768-39fe20a4c4ed-etc-swift podName:dbc6d977-1883-4a60-9768-39fe20a4c4ed nodeName:}" failed. No retries permitted until 2026-01-26 11:05:39.759203631 +0000 UTC m=+1355.300429202 (durationBeforeRetry 8s).
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/dbc6d977-1883-4a60-9768-39fe20a4c4ed-etc-swift") pod "swift-storage-0" (UID: "dbc6d977-1883-4a60-9768-39fe20a4c4ed") : configmap "swift-ring-files" not found
Jan 26 11:05:31 crc kubenswrapper[5003]: I0126 11:05:31.962704 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-etc-swift\") pod \"swift-proxy-6bb4649ff-slvzg\" (UID: \"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694\") " pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg"
Jan 26 11:05:31 crc kubenswrapper[5003]: E0126 11:05:31.962907 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found
Jan 26 11:05:31 crc kubenswrapper[5003]: E0126 11:05:31.962947 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg: configmap "swift-ring-files" not found
Jan 26 11:05:31 crc kubenswrapper[5003]: E0126 11:05:31.963018 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-etc-swift podName:2bc7f0bc-22bd-427a-87b9-d00ea1c2f694 nodeName:}" failed. No retries permitted until 2026-01-26 11:05:39.96299635 +0000 UTC m=+1355.504221911 (durationBeforeRetry 8s).
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-etc-swift") pod "swift-proxy-6bb4649ff-slvzg" (UID: "2bc7f0bc-22bd-427a-87b9-d00ea1c2f694") : configmap "swift-ring-files" not found
Jan 26 11:05:36 crc kubenswrapper[5003]: I0126 11:05:36.104364 5003 generic.go:334] "Generic (PLEG): container finished" podID="b8e3ebf8-f546-4806-a82d-bca78c5af057" containerID="d3e4b89d816b719481fc57de309d3c349994a5562e0428530b96dddee0f18332" exitCode=0
Jan 26 11:05:36 crc kubenswrapper[5003]: I0126 11:05:36.104458 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-s6xzf" event={"ID":"b8e3ebf8-f546-4806-a82d-bca78c5af057","Type":"ContainerDied","Data":"d3e4b89d816b719481fc57de309d3c349994a5562e0428530b96dddee0f18332"}
Jan 26 11:05:37 crc kubenswrapper[5003]: I0126 11:05:37.454869 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-s6xzf"
Jan 26 11:05:37 crc kubenswrapper[5003]: I0126 11:05:37.552235 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b8e3ebf8-f546-4806-a82d-bca78c5af057-scripts\") pod \"b8e3ebf8-f546-4806-a82d-bca78c5af057\" (UID: \"b8e3ebf8-f546-4806-a82d-bca78c5af057\") "
Jan 26 11:05:37 crc kubenswrapper[5003]: I0126 11:05:37.552314 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b8e3ebf8-f546-4806-a82d-bca78c5af057-swiftconf\") pod \"b8e3ebf8-f546-4806-a82d-bca78c5af057\" (UID: \"b8e3ebf8-f546-4806-a82d-bca78c5af057\") "
Jan 26 11:05:37 crc kubenswrapper[5003]: I0126 11:05:37.552340 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b8e3ebf8-f546-4806-a82d-bca78c5af057-ring-data-devices\") pod \"b8e3ebf8-f546-4806-a82d-bca78c5af057\" (UID: \"b8e3ebf8-f546-4806-a82d-bca78c5af057\") "
Jan 26 11:05:37 crc kubenswrapper[5003]: I0126 11:05:37.552419 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b8e3ebf8-f546-4806-a82d-bca78c5af057-dispersionconf\") pod \"b8e3ebf8-f546-4806-a82d-bca78c5af057\" (UID: \"b8e3ebf8-f546-4806-a82d-bca78c5af057\") "
Jan 26 11:05:37 crc kubenswrapper[5003]: I0126 11:05:37.552495 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b8e3ebf8-f546-4806-a82d-bca78c5af057-etc-swift\") pod \"b8e3ebf8-f546-4806-a82d-bca78c5af057\" (UID: \"b8e3ebf8-f546-4806-a82d-bca78c5af057\") "
Jan 26 11:05:37 crc kubenswrapper[5003]: I0126 11:05:37.552595 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bwxpn\" (UniqueName: \"kubernetes.io/projected/b8e3ebf8-f546-4806-a82d-bca78c5af057-kube-api-access-bwxpn\") pod \"b8e3ebf8-f546-4806-a82d-bca78c5af057\" (UID: \"b8e3ebf8-f546-4806-a82d-bca78c5af057\") "
Jan 26 11:05:37 crc kubenswrapper[5003]: I0126 11:05:37.553064 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b8e3ebf8-f546-4806-a82d-bca78c5af057-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "b8e3ebf8-f546-4806-a82d-bca78c5af057" (UID: "b8e3ebf8-f546-4806-a82d-bca78c5af057"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 11:05:37 crc kubenswrapper[5003]: I0126 11:05:37.553970 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8e3ebf8-f546-4806-a82d-bca78c5af057-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "b8e3ebf8-f546-4806-a82d-bca78c5af057" (UID: "b8e3ebf8-f546-4806-a82d-bca78c5af057"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 11:05:37 crc kubenswrapper[5003]: I0126 11:05:37.558490 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8e3ebf8-f546-4806-a82d-bca78c5af057-kube-api-access-bwxpn" (OuterVolumeSpecName: "kube-api-access-bwxpn") pod "b8e3ebf8-f546-4806-a82d-bca78c5af057" (UID: "b8e3ebf8-f546-4806-a82d-bca78c5af057"). InnerVolumeSpecName "kube-api-access-bwxpn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 11:05:37 crc kubenswrapper[5003]: I0126 11:05:37.575204 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8e3ebf8-f546-4806-a82d-bca78c5af057-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "b8e3ebf8-f546-4806-a82d-bca78c5af057" (UID: "b8e3ebf8-f546-4806-a82d-bca78c5af057"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 11:05:37 crc kubenswrapper[5003]: I0126 11:05:37.583331 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b8e3ebf8-f546-4806-a82d-bca78c5af057-scripts" (OuterVolumeSpecName: "scripts") pod "b8e3ebf8-f546-4806-a82d-bca78c5af057" (UID: "b8e3ebf8-f546-4806-a82d-bca78c5af057"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 11:05:37 crc kubenswrapper[5003]: I0126 11:05:37.584354 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8e3ebf8-f546-4806-a82d-bca78c5af057-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "b8e3ebf8-f546-4806-a82d-bca78c5af057" (UID: "b8e3ebf8-f546-4806-a82d-bca78c5af057"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 11:05:37 crc kubenswrapper[5003]: I0126 11:05:37.654679 5003 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b8e3ebf8-f546-4806-a82d-bca78c5af057-dispersionconf\") on node \"crc\" DevicePath \"\""
Jan 26 11:05:37 crc kubenswrapper[5003]: I0126 11:05:37.654723 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b8e3ebf8-f546-4806-a82d-bca78c5af057-etc-swift\") on node \"crc\" DevicePath \"\""
Jan 26 11:05:37 crc kubenswrapper[5003]: I0126 11:05:37.654738 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bwxpn\" (UniqueName: \"kubernetes.io/projected/b8e3ebf8-f546-4806-a82d-bca78c5af057-kube-api-access-bwxpn\") on node \"crc\" DevicePath \"\""
Jan 26 11:05:37 crc kubenswrapper[5003]: I0126 11:05:37.654753 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b8e3ebf8-f546-4806-a82d-bca78c5af057-scripts\") on node \"crc\" DevicePath \"\""
Jan 26 11:05:37 crc kubenswrapper[5003]: I0126 11:05:37.654776 5003 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b8e3ebf8-f546-4806-a82d-bca78c5af057-swiftconf\") on node \"crc\" DevicePath \"\""
Jan 26 11:05:37 crc kubenswrapper[5003]: I0126 11:05:37.654787 5003 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b8e3ebf8-f546-4806-a82d-bca78c5af057-ring-data-devices\") on node \"crc\" DevicePath \"\""
Jan 26 11:05:38 crc kubenswrapper[5003]: I0126 11:05:38.125323 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-s6xzf" event={"ID":"b8e3ebf8-f546-4806-a82d-bca78c5af057","Type":"ContainerDied","Data":"05859bfb745cc7ab328f64190ba16584fc5143891a10fb7cf71cee414797fc10"}
Jan 26 11:05:38 crc kubenswrapper[5003]: I0126 11:05:38.125620 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="05859bfb745cc7ab328f64190ba16584fc5143891a10fb7cf71cee414797fc10"
Jan 26 11:05:38 crc kubenswrapper[5003]: I0126 11:05:38.125409 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-s6xzf"
Jan 26 11:05:39 crc kubenswrapper[5003]: I0126 11:05:39.041027 5003 patch_prober.go:28] interesting pod/machine-config-daemon-m84kp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 26 11:05:39 crc kubenswrapper[5003]: I0126 11:05:39.041161 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 26 11:05:39 crc kubenswrapper[5003]: I0126 11:05:39.041253 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m84kp"
Jan 26 11:05:39 crc kubenswrapper[5003]: I0126 11:05:39.042615 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d611de2469cbe98c2fe1bc7ea60af3e72e8a66e47fcfb0fbfee926d96efd43c1"} pod="openshift-machine-config-operator/machine-config-daemon-m84kp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 26 11:05:39 crc kubenswrapper[5003]: I0126 11:05:39.042781 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" containerID="cri-o://d611de2469cbe98c2fe1bc7ea60af3e72e8a66e47fcfb0fbfee926d96efd43c1" gracePeriod=600
Jan 26 11:05:39 crc kubenswrapper[5003]: I0126 11:05:39.790482 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/dbc6d977-1883-4a60-9768-39fe20a4c4ed-etc-swift\") pod \"swift-storage-0\" (UID: \"dbc6d977-1883-4a60-9768-39fe20a4c4ed\") " pod="swift-kuttl-tests/swift-storage-0"
Jan 26 11:05:39 crc kubenswrapper[5003]: I0126 11:05:39.798155 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/dbc6d977-1883-4a60-9768-39fe20a4c4ed-etc-swift\") pod \"swift-storage-0\" (UID: \"dbc6d977-1883-4a60-9768-39fe20a4c4ed\") " pod="swift-kuttl-tests/swift-storage-0"
Jan 26 11:05:39 crc kubenswrapper[5003]: I0126 11:05:39.828725 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0"
Jan 26 11:05:39 crc kubenswrapper[5003]: I0126 11:05:39.995402 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-etc-swift\") pod \"swift-proxy-6bb4649ff-slvzg\" (UID: \"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694\") " pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg"
Jan 26 11:05:40 crc kubenswrapper[5003]: I0126 11:05:40.002242 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-etc-swift\") pod \"swift-proxy-6bb4649ff-slvzg\" (UID: \"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694\") " pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg"
Jan 26 11:05:40 crc kubenswrapper[5003]: I0126 11:05:40.043069 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg"
Jan 26 11:05:40 crc kubenswrapper[5003]: I0126 11:05:40.128649 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-0"]
Jan 26 11:05:40 crc kubenswrapper[5003]: W0126 11:05:40.148644 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddbc6d977_1883_4a60_9768_39fe20a4c4ed.slice/crio-e5aba6207790bb7a5e94cc2f4b75932ead8fc6d0ca091bbbde6bd881b42a9116 WatchSource:0}: Error finding container e5aba6207790bb7a5e94cc2f4b75932ead8fc6d0ca091bbbde6bd881b42a9116: Status 404 returned error can't find the container with id e5aba6207790bb7a5e94cc2f4b75932ead8fc6d0ca091bbbde6bd881b42a9116
Jan 26 11:05:40 crc kubenswrapper[5003]: I0126 11:05:40.154099 5003 generic.go:334] "Generic (PLEG): container finished" podID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerID="d611de2469cbe98c2fe1bc7ea60af3e72e8a66e47fcfb0fbfee926d96efd43c1" exitCode=0
Jan 26 11:05:40 crc kubenswrapper[5003]: I0126 11:05:40.154188 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" event={"ID":"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd","Type":"ContainerDied","Data":"d611de2469cbe98c2fe1bc7ea60af3e72e8a66e47fcfb0fbfee926d96efd43c1"}
Jan 26 11:05:40 crc kubenswrapper[5003]: I0126 11:05:40.154242 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" event={"ID":"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd","Type":"ContainerStarted","Data":"fd493fb6bf7d26d1384d6055009fa9b6eff08b01fe28831769b5174d9d440aa2"}
Jan 26 11:05:40 crc kubenswrapper[5003]: I0126 11:05:40.154270 5003 scope.go:117] "RemoveContainer" containerID="e95ef3044b7da7897332a1c0dc0a352de84ea5dd8273e8eb61313248ed95c0df"
Jan 26 11:05:40 crc kubenswrapper[5003]: I0126 11:05:40.488792 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg"]
Jan 26 11:05:41 crc kubenswrapper[5003]: I0126 11:05:41.186986 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg" event={"ID":"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694","Type":"ContainerStarted","Data":"a4f87e9e7eeab211e14c1af3410ec6ff5b88e2d85a1d404a004ce6ca7e422e97"}
Jan 26 11:05:41 crc kubenswrapper[5003]: I0126 11:05:41.187654 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg" event={"ID":"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694","Type":"ContainerStarted","Data":"05366d27155f09233b67ea19715d9c8893d8571ecfb3131b98af5e9b5ca1b0ba"}
Jan 26 11:05:41 crc kubenswrapper[5003]: I0126 11:05:41.187675 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg" event={"ID":"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694","Type":"ContainerStarted","Data":"e0e5b01cf353ad98c5225da41c05a2a8003531f8ccc8f729916fdda6145022e9"}
Jan 26 11:05:41 crc kubenswrapper[5003]: I0126 11:05:41.188381 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg"
Jan 26 11:05:41 crc kubenswrapper[5003]: I0126 11:05:41.188421 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg"
Jan 26 11:05:41 crc kubenswrapper[5003]: I0126 11:05:41.199954 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"dbc6d977-1883-4a60-9768-39fe20a4c4ed","Type":"ContainerStarted","Data":"26970c88275d831b8cfc37137ac332f72c9c80205bd79693e2fb844ca850c5d4"}
Jan 26 11:05:41 crc kubenswrapper[5003]: I0126 11:05:41.200005 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"dbc6d977-1883-4a60-9768-39fe20a4c4ed","Type":"ContainerStarted","Data":"20361e02fa6e5504119da17f3f4317a34a78ea4e3d97257903221bf6a3465487"}
Jan 26 11:05:41 crc kubenswrapper[5003]: I0126 11:05:41.200024 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"dbc6d977-1883-4a60-9768-39fe20a4c4ed","Type":"ContainerStarted","Data":"78c305d992140bcfbcf7965e3cc7061e2a9c92d5781980c307adf30362dff748"}
Jan 26 11:05:41 crc kubenswrapper[5003]: I0126 11:05:41.200037 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"dbc6d977-1883-4a60-9768-39fe20a4c4ed","Type":"ContainerStarted","Data":"6856586b6764d2d9ade5ac7f106ccb105b3cea21024fb4cd45cea64a433a433a"}
Jan 26 11:05:41 crc kubenswrapper[5003]: I0126 11:05:41.200049 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"dbc6d977-1883-4a60-9768-39fe20a4c4ed","Type":"ContainerStarted","Data":"b5778b163f43e4199e61bf680d32df88f835c04806af665bb62481b12c58c529"}
Jan 26 11:05:41 crc kubenswrapper[5003]: I0126 11:05:41.200061 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"dbc6d977-1883-4a60-9768-39fe20a4c4ed","Type":"ContainerStarted","Data":"e5aba6207790bb7a5e94cc2f4b75932ead8fc6d0ca091bbbde6bd881b42a9116"}
Jan 26 11:05:41 crc kubenswrapper[5003]: I0126 11:05:41.221788 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg" podStartSLOduration=17.221767602 podStartE2EDuration="17.221767602s" podCreationTimestamp="2026-01-26 11:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 11:05:41.208864792 +0000 UTC m=+1356.750090383" watchObservedRunningTime="2026-01-26 11:05:41.221767602 +0000 UTC m=+1356.762993163"
Jan 26 11:05:42 crc kubenswrapper[5003]: I0126 11:05:42.217884 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"dbc6d977-1883-4a60-9768-39fe20a4c4ed","Type":"ContainerStarted","Data":"fe6a82e6537f704100d86733c47045a0dc11ad52f91e05cc9d27c53228f474aa"}
Jan 26 11:05:42 crc kubenswrapper[5003]: I0126 11:05:42.218548 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"dbc6d977-1883-4a60-9768-39fe20a4c4ed","Type":"ContainerStarted","Data":"1892c5df0f97ddc761455fbae7b3bc00f433ac82c936df6b99e816502660346d"}
Jan 26 11:05:42 crc kubenswrapper[5003]: I0126 11:05:42.218562 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"dbc6d977-1883-4a60-9768-39fe20a4c4ed","Type":"ContainerStarted","Data":"837369ec91ac552adf85b9327cfdbe55939dba4ab49d8e1df1ab28f68db74b0a"}
Jan 26 11:05:42 crc kubenswrapper[5003]: I0126 11:05:42.218572 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"dbc6d977-1883-4a60-9768-39fe20a4c4ed","Type":"ContainerStarted","Data":"2aeafc4ab81c00b82fba0ca8086655e2659ed60f1a7aba204706e01651f8217f"}
Jan 26 11:05:42 crc kubenswrapper[5003]: I0126 11:05:42.218584 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"dbc6d977-1883-4a60-9768-39fe20a4c4ed","Type":"ContainerStarted","Data":"e9d4e2006f4906e90bf1c99cfc7b76c8096502c24667a7891f56f3e031c65e92"}
Jan 26 11:05:42 crc kubenswrapper[5003]: I0126 11:05:42.218593 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"dbc6d977-1883-4a60-9768-39fe20a4c4ed","Type":"ContainerStarted","Data":"eb1fe80ff8611d70f827cd9cf0c1c51537e7e074100f3683bf5601788cd0e021"}
Jan 26 11:05:43 crc kubenswrapper[5003]: I0126 11:05:43.233005 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"dbc6d977-1883-4a60-9768-39fe20a4c4ed","Type":"ContainerStarted","Data":"ce3bd2f7dc397b83e2c98a106abb9f0255350ec691bff08ca793eea8359bb86b"}
Jan 26 11:05:43 crc kubenswrapper[5003]: I0126 11:05:43.233084 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"dbc6d977-1883-4a60-9768-39fe20a4c4ed","Type":"ContainerStarted","Data":"9525c4f6af285b83022dc7872ad7d96e0258131dd1b1b3a54621fece4d73b356"}
Jan 26 11:05:43 crc kubenswrapper[5003]: I0126 11:05:43.233100 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"dbc6d977-1883-4a60-9768-39fe20a4c4ed","Type":"ContainerStarted","Data":"c58218c1c9329d527320c12af5203b6766d66444d7396a6621fc110a94bf364b"}
Jan 26 11:05:43 crc kubenswrapper[5003]: I0126 11:05:43.233112 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"dbc6d977-1883-4a60-9768-39fe20a4c4ed","Type":"ContainerStarted","Data":"f56b122c36e67f251c10dde9254f2924069036f367b8b889092f255fcdc60473"}
Jan 26 11:05:43 crc kubenswrapper[5003]: I0126 11:05:43.273349 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-storage-0" podStartSLOduration=21.273325358 podStartE2EDuration="21.273325358s" podCreationTimestamp="2026-01-26 11:05:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 11:05:43.266597185 +0000 UTC m=+1358.807822746" watchObservedRunningTime="2026-01-26 11:05:43.273325358 +0000 UTC m=+1358.814550919"
Jan 26 11:05:45 crc kubenswrapper[5003]: I0126 11:05:45.057221 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg"
Jan 26 11:05:45 crc kubenswrapper[5003]: I0126 11:05:45.057947 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg"
Jan 26 11:05:46 crc kubenswrapper[5003]: I0126 11:05:46.196905 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-0"]
Jan 26 11:05:46 crc kubenswrapper[5003]: I0126 11:05:46.197473 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="account-server" containerID="cri-o://b5778b163f43e4199e61bf680d32df88f835c04806af665bb62481b12c58c529" gracePeriod=30
Jan 26 11:05:46 crc kubenswrapper[5003]: I0126 11:05:46.197561 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="object-auditor" containerID="cri-o://1892c5df0f97ddc761455fbae7b3bc00f433ac82c936df6b99e816502660346d" gracePeriod=30
Jan 26 11:05:46 crc kubenswrapper[5003]: I0126 11:05:46.197585 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="container-updater" containerID="cri-o://e9d4e2006f4906e90bf1c99cfc7b76c8096502c24667a7891f56f3e031c65e92" gracePeriod=30
Jan 26 11:05:46 crc kubenswrapper[5003]: I0126 11:05:46.197641 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="object-replicator" containerID="cri-o://837369ec91ac552adf85b9327cfdbe55939dba4ab49d8e1df1ab28f68db74b0a" gracePeriod=30
Jan 26 11:05:46 crc kubenswrapper[5003]: I0126 11:05:46.197718 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="object-updater" containerID="cri-o://ce3bd2f7dc397b83e2c98a106abb9f0255350ec691bff08ca793eea8359bb86b" gracePeriod=30
Jan 26 11:05:46 crc kubenswrapper[5003]: I0126 11:05:46.197745 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="object-server" containerID="cri-o://2aeafc4ab81c00b82fba0ca8086655e2659ed60f1a7aba204706e01651f8217f" gracePeriod=30
Jan 26 11:05:46 crc kubenswrapper[5003]: I0126 11:05:46.197622 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="object-expirer" containerID="cri-o://f56b122c36e67f251c10dde9254f2924069036f367b8b889092f255fcdc60473" gracePeriod=30
Jan 26 11:05:46 crc kubenswrapper[5003]: I0126 11:05:46.197568 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="swift-recon-cron" containerID="cri-o://9525c4f6af285b83022dc7872ad7d96e0258131dd1b1b3a54621fece4d73b356" gracePeriod=30
Jan 26 11:05:46 crc kubenswrapper[5003]: I0126 11:05:46.197822 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0"
podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="container-server" containerID="cri-o://26970c88275d831b8cfc37137ac332f72c9c80205bd79693e2fb844ca850c5d4" gracePeriod=30 Jan 26 11:05:46 crc kubenswrapper[5003]: I0126 11:05:46.197849 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="account-auditor" containerID="cri-o://78c305d992140bcfbcf7965e3cc7061e2a9c92d5781980c307adf30362dff748" gracePeriod=30 Jan 26 11:05:46 crc kubenswrapper[5003]: I0126 11:05:46.197854 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="container-auditor" containerID="cri-o://eb1fe80ff8611d70f827cd9cf0c1c51537e7e074100f3683bf5601788cd0e021" gracePeriod=30 Jan 26 11:05:46 crc kubenswrapper[5003]: I0126 11:05:46.197862 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="account-reaper" containerID="cri-o://20361e02fa6e5504119da17f3f4317a34a78ea4e3d97257903221bf6a3465487" gracePeriod=30 Jan 26 11:05:46 crc kubenswrapper[5003]: I0126 11:05:46.197906 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="container-replicator" containerID="cri-o://fe6a82e6537f704100d86733c47045a0dc11ad52f91e05cc9d27c53228f474aa" gracePeriod=30 Jan 26 11:05:46 crc kubenswrapper[5003]: I0126 11:05:46.197922 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="account-replicator" containerID="cri-o://6856586b6764d2d9ade5ac7f106ccb105b3cea21024fb4cd45cea64a433a433a" gracePeriod=30 Jan 26 11:05:46 crc kubenswrapper[5003]: I0126 11:05:46.199515 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="rsync" containerID="cri-o://c58218c1c9329d527320c12af5203b6766d66444d7396a6621fc110a94bf364b" gracePeriod=30 Jan 26 11:05:46 crc kubenswrapper[5003]: I0126 11:05:46.207630 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-s6xzf"] Jan 26 11:05:46 crc kubenswrapper[5003]: I0126 11:05:46.212456 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-s6xzf"] Jan 26 11:05:46 crc kubenswrapper[5003]: I0126 11:05:46.601178 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg"] Jan 26 11:05:46 crc kubenswrapper[5003]: I0126 11:05:46.601710 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg" podUID="2bc7f0bc-22bd-427a-87b9-d00ea1c2f694" containerName="proxy-httpd" containerID="cri-o://05366d27155f09233b67ea19715d9c8893d8571ecfb3131b98af5e9b5ca1b0ba" gracePeriod=30 Jan 26 11:05:46 crc kubenswrapper[5003]: I0126 11:05:46.601791 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg" podUID="2bc7f0bc-22bd-427a-87b9-d00ea1c2f694" containerName="proxy-server" 
containerID="cri-o://a4f87e9e7eeab211e14c1af3410ec6ff5b88e2d85a1d404a004ce6ca7e422e97" gracePeriod=30 Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.012924 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8e3ebf8-f546-4806-a82d-bca78c5af057" path="/var/lib/kubelet/pods/b8e3ebf8-f546-4806-a82d-bca78c5af057/volumes" Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.266832 5003 generic.go:334] "Generic (PLEG): container finished" podID="2bc7f0bc-22bd-427a-87b9-d00ea1c2f694" containerID="a4f87e9e7eeab211e14c1af3410ec6ff5b88e2d85a1d404a004ce6ca7e422e97" exitCode=0 Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.266875 5003 generic.go:334] "Generic (PLEG): container finished" podID="2bc7f0bc-22bd-427a-87b9-d00ea1c2f694" containerID="05366d27155f09233b67ea19715d9c8893d8571ecfb3131b98af5e9b5ca1b0ba" exitCode=0 Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.266884 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg" event={"ID":"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694","Type":"ContainerDied","Data":"a4f87e9e7eeab211e14c1af3410ec6ff5b88e2d85a1d404a004ce6ca7e422e97"} Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.266932 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg" event={"ID":"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694","Type":"ContainerDied","Data":"05366d27155f09233b67ea19715d9c8893d8571ecfb3131b98af5e9b5ca1b0ba"} Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.275675 5003 generic.go:334] "Generic (PLEG): container finished" podID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerID="c58218c1c9329d527320c12af5203b6766d66444d7396a6621fc110a94bf364b" exitCode=0 Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.275707 5003 generic.go:334] "Generic (PLEG): container finished" podID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerID="f56b122c36e67f251c10dde9254f2924069036f367b8b889092f255fcdc60473" exitCode=0 Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.275717 5003 generic.go:334] "Generic (PLEG): container finished" podID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerID="ce3bd2f7dc397b83e2c98a106abb9f0255350ec691bff08ca793eea8359bb86b" exitCode=0 Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.275723 5003 generic.go:334] "Generic (PLEG): container finished" podID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerID="1892c5df0f97ddc761455fbae7b3bc00f433ac82c936df6b99e816502660346d" exitCode=0 Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.275733 5003 generic.go:334] "Generic (PLEG): container finished" podID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerID="837369ec91ac552adf85b9327cfdbe55939dba4ab49d8e1df1ab28f68db74b0a" exitCode=0 Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.275739 5003 generic.go:334] "Generic (PLEG): container finished" podID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerID="2aeafc4ab81c00b82fba0ca8086655e2659ed60f1a7aba204706e01651f8217f" exitCode=0 Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.275746 5003 generic.go:334] "Generic (PLEG): container finished" podID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerID="e9d4e2006f4906e90bf1c99cfc7b76c8096502c24667a7891f56f3e031c65e92" exitCode=0 Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.275754 5003 generic.go:334] "Generic (PLEG): container finished" podID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerID="eb1fe80ff8611d70f827cd9cf0c1c51537e7e074100f3683bf5601788cd0e021" exitCode=0 Jan 
26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.275761 5003 generic.go:334] "Generic (PLEG): container finished" podID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerID="fe6a82e6537f704100d86733c47045a0dc11ad52f91e05cc9d27c53228f474aa" exitCode=0 Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.275768 5003 generic.go:334] "Generic (PLEG): container finished" podID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerID="26970c88275d831b8cfc37137ac332f72c9c80205bd79693e2fb844ca850c5d4" exitCode=0 Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.275781 5003 generic.go:334] "Generic (PLEG): container finished" podID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerID="20361e02fa6e5504119da17f3f4317a34a78ea4e3d97257903221bf6a3465487" exitCode=0 Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.275788 5003 generic.go:334] "Generic (PLEG): container finished" podID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerID="78c305d992140bcfbcf7965e3cc7061e2a9c92d5781980c307adf30362dff748" exitCode=0 Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.275799 5003 generic.go:334] "Generic (PLEG): container finished" podID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerID="6856586b6764d2d9ade5ac7f106ccb105b3cea21024fb4cd45cea64a433a433a" exitCode=0 Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.275806 5003 generic.go:334] "Generic (PLEG): container finished" podID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerID="b5778b163f43e4199e61bf680d32df88f835c04806af665bb62481b12c58c529" exitCode=0 Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.275752 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"dbc6d977-1883-4a60-9768-39fe20a4c4ed","Type":"ContainerDied","Data":"c58218c1c9329d527320c12af5203b6766d66444d7396a6621fc110a94bf364b"} Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.275839 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"dbc6d977-1883-4a60-9768-39fe20a4c4ed","Type":"ContainerDied","Data":"f56b122c36e67f251c10dde9254f2924069036f367b8b889092f255fcdc60473"} Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.275857 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"dbc6d977-1883-4a60-9768-39fe20a4c4ed","Type":"ContainerDied","Data":"ce3bd2f7dc397b83e2c98a106abb9f0255350ec691bff08ca793eea8359bb86b"} Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.275870 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"dbc6d977-1883-4a60-9768-39fe20a4c4ed","Type":"ContainerDied","Data":"1892c5df0f97ddc761455fbae7b3bc00f433ac82c936df6b99e816502660346d"} Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.275881 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"dbc6d977-1883-4a60-9768-39fe20a4c4ed","Type":"ContainerDied","Data":"837369ec91ac552adf85b9327cfdbe55939dba4ab49d8e1df1ab28f68db74b0a"} Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.275894 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"dbc6d977-1883-4a60-9768-39fe20a4c4ed","Type":"ContainerDied","Data":"2aeafc4ab81c00b82fba0ca8086655e2659ed60f1a7aba204706e01651f8217f"} Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.275905 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" 
event={"ID":"dbc6d977-1883-4a60-9768-39fe20a4c4ed","Type":"ContainerDied","Data":"e9d4e2006f4906e90bf1c99cfc7b76c8096502c24667a7891f56f3e031c65e92"} Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.275915 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"dbc6d977-1883-4a60-9768-39fe20a4c4ed","Type":"ContainerDied","Data":"eb1fe80ff8611d70f827cd9cf0c1c51537e7e074100f3683bf5601788cd0e021"} Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.275930 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"dbc6d977-1883-4a60-9768-39fe20a4c4ed","Type":"ContainerDied","Data":"fe6a82e6537f704100d86733c47045a0dc11ad52f91e05cc9d27c53228f474aa"} Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.275941 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"dbc6d977-1883-4a60-9768-39fe20a4c4ed","Type":"ContainerDied","Data":"26970c88275d831b8cfc37137ac332f72c9c80205bd79693e2fb844ca850c5d4"} Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.275955 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"dbc6d977-1883-4a60-9768-39fe20a4c4ed","Type":"ContainerDied","Data":"20361e02fa6e5504119da17f3f4317a34a78ea4e3d97257903221bf6a3465487"} Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.275965 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"dbc6d977-1883-4a60-9768-39fe20a4c4ed","Type":"ContainerDied","Data":"78c305d992140bcfbcf7965e3cc7061e2a9c92d5781980c307adf30362dff748"} Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.275975 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"dbc6d977-1883-4a60-9768-39fe20a4c4ed","Type":"ContainerDied","Data":"6856586b6764d2d9ade5ac7f106ccb105b3cea21024fb4cd45cea64a433a433a"} Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.275986 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"dbc6d977-1883-4a60-9768-39fe20a4c4ed","Type":"ContainerDied","Data":"b5778b163f43e4199e61bf680d32df88f835c04806af665bb62481b12c58c529"} Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.310230 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg" Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.419632 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fh4gv\" (UniqueName: \"kubernetes.io/projected/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-kube-api-access-fh4gv\") pod \"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694\" (UID: \"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694\") " Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.419690 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-etc-swift\") pod \"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694\" (UID: \"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694\") " Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.419782 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-run-httpd\") pod \"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694\" (UID: \"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694\") " Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.419858 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-log-httpd\") pod \"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694\" (UID: \"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694\") " Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.419890 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-config-data\") pod \"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694\" (UID: \"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694\") " Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.420450 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "2bc7f0bc-22bd-427a-87b9-d00ea1c2f694" (UID: "2bc7f0bc-22bd-427a-87b9-d00ea1c2f694"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.420532 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "2bc7f0bc-22bd-427a-87b9-d00ea1c2f694" (UID: "2bc7f0bc-22bd-427a-87b9-d00ea1c2f694"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.428176 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "2bc7f0bc-22bd-427a-87b9-d00ea1c2f694" (UID: "2bc7f0bc-22bd-427a-87b9-d00ea1c2f694"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.428557 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-kube-api-access-fh4gv" (OuterVolumeSpecName: "kube-api-access-fh4gv") pod "2bc7f0bc-22bd-427a-87b9-d00ea1c2f694" (UID: "2bc7f0bc-22bd-427a-87b9-d00ea1c2f694"). InnerVolumeSpecName "kube-api-access-fh4gv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.480029 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-config-data" (OuterVolumeSpecName: "config-data") pod "2bc7f0bc-22bd-427a-87b9-d00ea1c2f694" (UID: "2bc7f0bc-22bd-427a-87b9-d00ea1c2f694"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.522084 5003 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.522127 5003 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.522137 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-config-data\") on node \"crc\" DevicePath \"\"" Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.522147 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 26 11:05:47 crc kubenswrapper[5003]: I0126 11:05:47.522159 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fh4gv\" (UniqueName: \"kubernetes.io/projected/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694-kube-api-access-fh4gv\") on node \"crc\" DevicePath \"\"" Jan 26 11:05:48 crc kubenswrapper[5003]: I0126 11:05:48.296330 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg" event={"ID":"2bc7f0bc-22bd-427a-87b9-d00ea1c2f694","Type":"ContainerDied","Data":"e0e5b01cf353ad98c5225da41c05a2a8003531f8ccc8f729916fdda6145022e9"} Jan 26 11:05:48 crc kubenswrapper[5003]: I0126 11:05:48.296396 5003 scope.go:117] "RemoveContainer" containerID="a4f87e9e7eeab211e14c1af3410ec6ff5b88e2d85a1d404a004ce6ca7e422e97" Jan 26 11:05:48 crc kubenswrapper[5003]: I0126 11:05:48.296400 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg" Jan 26 11:05:48 crc kubenswrapper[5003]: I0126 11:05:48.331459 5003 scope.go:117] "RemoveContainer" containerID="05366d27155f09233b67ea19715d9c8893d8571ecfb3131b98af5e9b5ca1b0ba" Jan 26 11:05:48 crc kubenswrapper[5003]: I0126 11:05:48.369024 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg"] Jan 26 11:05:48 crc kubenswrapper[5003]: I0126 11:05:48.377241 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-proxy-6bb4649ff-slvzg"] Jan 26 11:05:49 crc kubenswrapper[5003]: I0126 11:05:49.010725 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2bc7f0bc-22bd-427a-87b9-d00ea1c2f694" path="/var/lib/kubelet/pods/2bc7f0bc-22bd-427a-87b9-d00ea1c2f694/volumes" Jan 26 11:06:16 crc kubenswrapper[5003]: I0126 11:06:16.545218 5003 generic.go:334] "Generic (PLEG): container finished" podID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerID="9525c4f6af285b83022dc7872ad7d96e0258131dd1b1b3a54621fece4d73b356" exitCode=137 Jan 26 11:06:16 crc kubenswrapper[5003]: I0126 11:06:16.545298 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"dbc6d977-1883-4a60-9768-39fe20a4c4ed","Type":"ContainerDied","Data":"9525c4f6af285b83022dc7872ad7d96e0258131dd1b1b3a54621fece4d73b356"} Jan 26 11:06:16 crc kubenswrapper[5003]: I0126 11:06:16.545786 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"dbc6d977-1883-4a60-9768-39fe20a4c4ed","Type":"ContainerDied","Data":"e5aba6207790bb7a5e94cc2f4b75932ead8fc6d0ca091bbbde6bd881b42a9116"} Jan 26 11:06:16 crc kubenswrapper[5003]: I0126 11:06:16.545802 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e5aba6207790bb7a5e94cc2f4b75932ead8fc6d0ca091bbbde6bd881b42a9116" Jan 26 11:06:16 crc kubenswrapper[5003]: I0126 11:06:16.584404 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:06:16 crc kubenswrapper[5003]: I0126 11:06:16.767161 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"dbc6d977-1883-4a60-9768-39fe20a4c4ed\" (UID: \"dbc6d977-1883-4a60-9768-39fe20a4c4ed\") " Jan 26 11:06:16 crc kubenswrapper[5003]: I0126 11:06:16.767271 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/dbc6d977-1883-4a60-9768-39fe20a4c4ed-cache\") pod \"dbc6d977-1883-4a60-9768-39fe20a4c4ed\" (UID: \"dbc6d977-1883-4a60-9768-39fe20a4c4ed\") " Jan 26 11:06:16 crc kubenswrapper[5003]: I0126 11:06:16.767314 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b7sbx\" (UniqueName: \"kubernetes.io/projected/dbc6d977-1883-4a60-9768-39fe20a4c4ed-kube-api-access-b7sbx\") pod \"dbc6d977-1883-4a60-9768-39fe20a4c4ed\" (UID: \"dbc6d977-1883-4a60-9768-39fe20a4c4ed\") " Jan 26 11:06:16 crc kubenswrapper[5003]: I0126 11:06:16.767383 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/dbc6d977-1883-4a60-9768-39fe20a4c4ed-etc-swift\") pod \"dbc6d977-1883-4a60-9768-39fe20a4c4ed\" (UID: \"dbc6d977-1883-4a60-9768-39fe20a4c4ed\") " Jan 26 11:06:16 crc kubenswrapper[5003]: I0126 11:06:16.767445 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/dbc6d977-1883-4a60-9768-39fe20a4c4ed-lock\") pod \"dbc6d977-1883-4a60-9768-39fe20a4c4ed\" (UID: \"dbc6d977-1883-4a60-9768-39fe20a4c4ed\") " Jan 26 11:06:16 crc kubenswrapper[5003]: I0126 11:06:16.768336 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dbc6d977-1883-4a60-9768-39fe20a4c4ed-cache" (OuterVolumeSpecName: "cache") pod "dbc6d977-1883-4a60-9768-39fe20a4c4ed" (UID: "dbc6d977-1883-4a60-9768-39fe20a4c4ed"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:06:16 crc kubenswrapper[5003]: I0126 11:06:16.768362 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dbc6d977-1883-4a60-9768-39fe20a4c4ed-lock" (OuterVolumeSpecName: "lock") pod "dbc6d977-1883-4a60-9768-39fe20a4c4ed" (UID: "dbc6d977-1883-4a60-9768-39fe20a4c4ed"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:06:16 crc kubenswrapper[5003]: I0126 11:06:16.773398 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "swift") pod "dbc6d977-1883-4a60-9768-39fe20a4c4ed" (UID: "dbc6d977-1883-4a60-9768-39fe20a4c4ed"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 26 11:06:16 crc kubenswrapper[5003]: I0126 11:06:16.773757 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dbc6d977-1883-4a60-9768-39fe20a4c4ed-kube-api-access-b7sbx" (OuterVolumeSpecName: "kube-api-access-b7sbx") pod "dbc6d977-1883-4a60-9768-39fe20a4c4ed" (UID: "dbc6d977-1883-4a60-9768-39fe20a4c4ed"). InnerVolumeSpecName "kube-api-access-b7sbx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:06:16 crc kubenswrapper[5003]: I0126 11:06:16.773890 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dbc6d977-1883-4a60-9768-39fe20a4c4ed-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "dbc6d977-1883-4a60-9768-39fe20a4c4ed" (UID: "dbc6d977-1883-4a60-9768-39fe20a4c4ed"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:06:16 crc kubenswrapper[5003]: I0126 11:06:16.888875 5003 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/dbc6d977-1883-4a60-9768-39fe20a4c4ed-cache\") on node \"crc\" DevicePath \"\"" Jan 26 11:06:16 crc kubenswrapper[5003]: I0126 11:06:16.888931 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b7sbx\" (UniqueName: \"kubernetes.io/projected/dbc6d977-1883-4a60-9768-39fe20a4c4ed-kube-api-access-b7sbx\") on node \"crc\" DevicePath \"\"" Jan 26 11:06:16 crc kubenswrapper[5003]: I0126 11:06:16.888945 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/dbc6d977-1883-4a60-9768-39fe20a4c4ed-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 26 11:06:16 crc kubenswrapper[5003]: I0126 11:06:16.888956 5003 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/dbc6d977-1883-4a60-9768-39fe20a4c4ed-lock\") on node \"crc\" DevicePath \"\"" Jan 26 11:06:16 crc kubenswrapper[5003]: I0126 11:06:16.888986 5003 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jan 26 11:06:16 crc kubenswrapper[5003]: I0126 11:06:16.903445 5003 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jan 26 11:06:16 crc kubenswrapper[5003]: I0126 11:06:16.990985 5003 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jan 26 11:06:17 crc kubenswrapper[5003]: I0126 11:06:17.552824 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:06:17 crc kubenswrapper[5003]: I0126 11:06:17.578734 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Jan 26 11:06:17 crc kubenswrapper[5003]: I0126 11:06:17.585551 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.010334 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" path="/var/lib/kubelet/pods/dbc6d977-1883-4a60-9768-39fe20a4c4ed/volumes" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.595182 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Jan 26 11:06:19 crc kubenswrapper[5003]: E0126 11:06:19.595553 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="account-server" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.595574 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="account-server" Jan 26 11:06:19 crc kubenswrapper[5003]: E0126 11:06:19.595587 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="object-auditor" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.595598 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="object-auditor" Jan 26 11:06:19 crc kubenswrapper[5003]: E0126 11:06:19.595615 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="account-replicator" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.595623 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="account-replicator" Jan 26 11:06:19 crc kubenswrapper[5003]: E0126 11:06:19.595633 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="account-auditor" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.595640 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="account-auditor" Jan 26 11:06:19 crc kubenswrapper[5003]: E0126 11:06:19.595648 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="account-reaper" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.595655 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="account-reaper" Jan 26 11:06:19 crc kubenswrapper[5003]: E0126 11:06:19.595668 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bc7f0bc-22bd-427a-87b9-d00ea1c2f694" containerName="proxy-server" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.595675 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bc7f0bc-22bd-427a-87b9-d00ea1c2f694" containerName="proxy-server" Jan 26 11:06:19 crc kubenswrapper[5003]: E0126 11:06:19.595691 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="rsync" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.595700 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="rsync" Jan 26 
11:06:19 crc kubenswrapper[5003]: E0126 11:06:19.595713 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="object-replicator" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.595721 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="object-replicator" Jan 26 11:06:19 crc kubenswrapper[5003]: E0126 11:06:19.595734 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="object-server" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.595741 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="object-server" Jan 26 11:06:19 crc kubenswrapper[5003]: E0126 11:06:19.595753 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="container-server" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.595760 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="container-server" Jan 26 11:06:19 crc kubenswrapper[5003]: E0126 11:06:19.595773 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="swift-recon-cron" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.595780 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="swift-recon-cron" Jan 26 11:06:19 crc kubenswrapper[5003]: E0126 11:06:19.595790 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="container-updater" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.595797 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="container-updater" Jan 26 11:06:19 crc kubenswrapper[5003]: E0126 11:06:19.595809 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="container-auditor" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.595816 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="container-auditor" Jan 26 11:06:19 crc kubenswrapper[5003]: E0126 11:06:19.595828 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8e3ebf8-f546-4806-a82d-bca78c5af057" containerName="swift-ring-rebalance" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.595836 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8e3ebf8-f546-4806-a82d-bca78c5af057" containerName="swift-ring-rebalance" Jan 26 11:06:19 crc kubenswrapper[5003]: E0126 11:06:19.595849 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="container-replicator" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.595857 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="container-replicator" Jan 26 11:06:19 crc kubenswrapper[5003]: E0126 11:06:19.595869 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="object-expirer" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.595876 5003 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="object-expirer" Jan 26 11:06:19 crc kubenswrapper[5003]: E0126 11:06:19.595887 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="object-updater" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.595894 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="object-updater" Jan 26 11:06:19 crc kubenswrapper[5003]: E0126 11:06:19.595908 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bc7f0bc-22bd-427a-87b9-d00ea1c2f694" containerName="proxy-httpd" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.595916 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bc7f0bc-22bd-427a-87b9-d00ea1c2f694" containerName="proxy-httpd" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.596081 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="account-reaper" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.596095 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bc7f0bc-22bd-427a-87b9-d00ea1c2f694" containerName="proxy-server" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.596107 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="object-auditor" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.596116 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="account-auditor" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.596127 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="container-replicator" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.596136 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="object-server" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.596146 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="object-expirer" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.596156 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="rsync" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.596167 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8e3ebf8-f546-4806-a82d-bca78c5af057" containerName="swift-ring-rebalance" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.596177 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="object-updater" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.596189 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bc7f0bc-22bd-427a-87b9-d00ea1c2f694" containerName="proxy-httpd" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.596201 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="account-replicator" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.596211 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="container-auditor" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.596219 
5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="object-replicator" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.596231 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="container-server" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.596240 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="swift-recon-cron" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.596251 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="account-server" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.596266 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbc6d977-1883-4a60-9768-39fe20a4c4ed" containerName="container-updater" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.601043 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.603126 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-storage-config-data" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.603151 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"combined-ca-bundle" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.603447 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-files" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.603573 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-swift-dockercfg-jgsb8" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.604237 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-conf" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.633255 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.723857 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"d446295a-b150-4b07-a2ea-cf0c1fb28553\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.723913 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/d446295a-b150-4b07-a2ea-cf0c1fb28553-cache\") pod \"swift-storage-0\" (UID: \"d446295a-b150-4b07-a2ea-cf0c1fb28553\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.723981 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d446295a-b150-4b07-a2ea-cf0c1fb28553-etc-swift\") pod \"swift-storage-0\" (UID: \"d446295a-b150-4b07-a2ea-cf0c1fb28553\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.724024 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/d446295a-b150-4b07-a2ea-cf0c1fb28553-lock\") pod 
\"swift-storage-0\" (UID: \"d446295a-b150-4b07-a2ea-cf0c1fb28553\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.724048 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d446295a-b150-4b07-a2ea-cf0c1fb28553-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"d446295a-b150-4b07-a2ea-cf0c1fb28553\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.724068 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cp24x\" (UniqueName: \"kubernetes.io/projected/d446295a-b150-4b07-a2ea-cf0c1fb28553-kube-api-access-cp24x\") pod \"swift-storage-0\" (UID: \"d446295a-b150-4b07-a2ea-cf0c1fb28553\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.825887 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"d446295a-b150-4b07-a2ea-cf0c1fb28553\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.825934 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/d446295a-b150-4b07-a2ea-cf0c1fb28553-cache\") pod \"swift-storage-0\" (UID: \"d446295a-b150-4b07-a2ea-cf0c1fb28553\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.825987 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d446295a-b150-4b07-a2ea-cf0c1fb28553-etc-swift\") pod \"swift-storage-0\" (UID: \"d446295a-b150-4b07-a2ea-cf0c1fb28553\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.826020 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/d446295a-b150-4b07-a2ea-cf0c1fb28553-lock\") pod \"swift-storage-0\" (UID: \"d446295a-b150-4b07-a2ea-cf0c1fb28553\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.826038 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d446295a-b150-4b07-a2ea-cf0c1fb28553-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"d446295a-b150-4b07-a2ea-cf0c1fb28553\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.826056 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cp24x\" (UniqueName: \"kubernetes.io/projected/d446295a-b150-4b07-a2ea-cf0c1fb28553-kube-api-access-cp24x\") pod \"swift-storage-0\" (UID: \"d446295a-b150-4b07-a2ea-cf0c1fb28553\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.826417 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"d446295a-b150-4b07-a2ea-cf0c1fb28553\") device mount path \"/mnt/openstack/pv09\"" pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:06:19 crc kubenswrapper[5003]: E0126 11:06:19.826513 5003 projected.go:288] 
Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 26 11:06:19 crc kubenswrapper[5003]: E0126 11:06:19.826531 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 26 11:06:19 crc kubenswrapper[5003]: E0126 11:06:19.826570 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d446295a-b150-4b07-a2ea-cf0c1fb28553-etc-swift podName:d446295a-b150-4b07-a2ea-cf0c1fb28553 nodeName:}" failed. No retries permitted until 2026-01-26 11:06:20.326554665 +0000 UTC m=+1395.867780226 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/d446295a-b150-4b07-a2ea-cf0c1fb28553-etc-swift") pod "swift-storage-0" (UID: "d446295a-b150-4b07-a2ea-cf0c1fb28553") : configmap "swift-ring-files" not found Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.826777 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/d446295a-b150-4b07-a2ea-cf0c1fb28553-cache\") pod \"swift-storage-0\" (UID: \"d446295a-b150-4b07-a2ea-cf0c1fb28553\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.827737 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/d446295a-b150-4b07-a2ea-cf0c1fb28553-lock\") pod \"swift-storage-0\" (UID: \"d446295a-b150-4b07-a2ea-cf0c1fb28553\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.833708 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d446295a-b150-4b07-a2ea-cf0c1fb28553-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"d446295a-b150-4b07-a2ea-cf0c1fb28553\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.848060 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"d446295a-b150-4b07-a2ea-cf0c1fb28553\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.852435 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cp24x\" (UniqueName: \"kubernetes.io/projected/d446295a-b150-4b07-a2ea-cf0c1fb28553-kube-api-access-cp24x\") pod \"swift-storage-0\" (UID: \"d446295a-b150-4b07-a2ea-cf0c1fb28553\") " pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.893173 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"] Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.894855 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.897970 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"cert-swift-public-svc" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.900390 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-proxy-config-data" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.900583 5003 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"cert-swift-internal-svc" Jan 26 11:06:19 crc kubenswrapper[5003]: I0126 11:06:19.906540 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"] Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.011018 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-542kn"] Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.011999 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-542kn" Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.015776 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-config-data" Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.018231 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-scripts" Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.027983 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-public-tls-certs\") pod \"swift-proxy-fdbc998f6-jxc44\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") " pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44" Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.028028 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzhv8\" (UniqueName: \"kubernetes.io/projected/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-kube-api-access-gzhv8\") pod \"swift-proxy-fdbc998f6-jxc44\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") " pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44" Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.028065 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-etc-swift\") pod \"swift-proxy-fdbc998f6-jxc44\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") " pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44" Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.028087 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-combined-ca-bundle\") pod \"swift-proxy-fdbc998f6-jxc44\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") " pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44" Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.028133 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-config-data\") pod \"swift-proxy-fdbc998f6-jxc44\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") " pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44" 
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.028162 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-internal-tls-certs\") pod \"swift-proxy-fdbc998f6-jxc44\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") " pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.028209 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-run-httpd\") pod \"swift-proxy-fdbc998f6-jxc44\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") " pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.028230 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-log-httpd\") pod \"swift-proxy-fdbc998f6-jxc44\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") " pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.030770 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-542kn"]
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.129975 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-internal-tls-certs\") pod \"swift-proxy-fdbc998f6-jxc44\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") " pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.130061 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-run-httpd\") pod \"swift-proxy-fdbc998f6-jxc44\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") " pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.130094 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-log-httpd\") pod \"swift-proxy-fdbc998f6-jxc44\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") " pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.130155 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9w9l5\" (UniqueName: \"kubernetes.io/projected/ce2db101-ee1a-401c-97fa-32ac80739e03-kube-api-access-9w9l5\") pod \"swift-ring-rebalance-542kn\" (UID: \"ce2db101-ee1a-401c-97fa-32ac80739e03\") " pod="swift-kuttl-tests/swift-ring-rebalance-542kn"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.130206 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-public-tls-certs\") pod \"swift-proxy-fdbc998f6-jxc44\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") " pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.130228 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ce2db101-ee1a-401c-97fa-32ac80739e03-scripts\") pod \"swift-ring-rebalance-542kn\" (UID: \"ce2db101-ee1a-401c-97fa-32ac80739e03\") " pod="swift-kuttl-tests/swift-ring-rebalance-542kn"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.130253 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce2db101-ee1a-401c-97fa-32ac80739e03-combined-ca-bundle\") pod \"swift-ring-rebalance-542kn\" (UID: \"ce2db101-ee1a-401c-97fa-32ac80739e03\") " pod="swift-kuttl-tests/swift-ring-rebalance-542kn"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.130298 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzhv8\" (UniqueName: \"kubernetes.io/projected/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-kube-api-access-gzhv8\") pod \"swift-proxy-fdbc998f6-jxc44\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") " pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.130328 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-etc-swift\") pod \"swift-proxy-fdbc998f6-jxc44\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") " pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.130349 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-combined-ca-bundle\") pod \"swift-proxy-fdbc998f6-jxc44\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") " pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.130372 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/ce2db101-ee1a-401c-97fa-32ac80739e03-etc-swift\") pod \"swift-ring-rebalance-542kn\" (UID: \"ce2db101-ee1a-401c-97fa-32ac80739e03\") " pod="swift-kuttl-tests/swift-ring-rebalance-542kn"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.130397 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/ce2db101-ee1a-401c-97fa-32ac80739e03-ring-data-devices\") pod \"swift-ring-rebalance-542kn\" (UID: \"ce2db101-ee1a-401c-97fa-32ac80739e03\") " pod="swift-kuttl-tests/swift-ring-rebalance-542kn"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.130421 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/ce2db101-ee1a-401c-97fa-32ac80739e03-swiftconf\") pod \"swift-ring-rebalance-542kn\" (UID: \"ce2db101-ee1a-401c-97fa-32ac80739e03\") " pod="swift-kuttl-tests/swift-ring-rebalance-542kn"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.130457 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/ce2db101-ee1a-401c-97fa-32ac80739e03-dispersionconf\") pod \"swift-ring-rebalance-542kn\" (UID: \"ce2db101-ee1a-401c-97fa-32ac80739e03\") " pod="swift-kuttl-tests/swift-ring-rebalance-542kn"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.130492 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-config-data\") pod \"swift-proxy-fdbc998f6-jxc44\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") " pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.130688 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-log-httpd\") pod \"swift-proxy-fdbc998f6-jxc44\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") " pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.130745 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-run-httpd\") pod \"swift-proxy-fdbc998f6-jxc44\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") " pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"
Jan 26 11:06:20 crc kubenswrapper[5003]: E0126 11:06:20.130802 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found
Jan 26 11:06:20 crc kubenswrapper[5003]: E0126 11:06:20.130816 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44: configmap "swift-ring-files" not found
Jan 26 11:06:20 crc kubenswrapper[5003]: E0126 11:06:20.130862 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-etc-swift podName:4e618c67-8bf8-4d28-b330-22d0cbd56cb5 nodeName:}" failed. No retries permitted until 2026-01-26 11:06:20.630841649 +0000 UTC m=+1396.172067210 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-etc-swift") pod "swift-proxy-fdbc998f6-jxc44" (UID: "4e618c67-8bf8-4d28-b330-22d0cbd56cb5") : configmap "swift-ring-files" not found
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.139050 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-internal-tls-certs\") pod \"swift-proxy-fdbc998f6-jxc44\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") " pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.139402 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-config-data\") pod \"swift-proxy-fdbc998f6-jxc44\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") " pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.139911 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-combined-ca-bundle\") pod \"swift-proxy-fdbc998f6-jxc44\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") " pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.147840 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-public-tls-certs\") pod \"swift-proxy-fdbc998f6-jxc44\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") " pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.151550 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzhv8\" (UniqueName: \"kubernetes.io/projected/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-kube-api-access-gzhv8\") pod \"swift-proxy-fdbc998f6-jxc44\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") " pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.232138 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce2db101-ee1a-401c-97fa-32ac80739e03-combined-ca-bundle\") pod \"swift-ring-rebalance-542kn\" (UID: \"ce2db101-ee1a-401c-97fa-32ac80739e03\") " pod="swift-kuttl-tests/swift-ring-rebalance-542kn"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.232186 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ce2db101-ee1a-401c-97fa-32ac80739e03-scripts\") pod \"swift-ring-rebalance-542kn\" (UID: \"ce2db101-ee1a-401c-97fa-32ac80739e03\") " pod="swift-kuttl-tests/swift-ring-rebalance-542kn"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.232219 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/ce2db101-ee1a-401c-97fa-32ac80739e03-etc-swift\") pod \"swift-ring-rebalance-542kn\" (UID: \"ce2db101-ee1a-401c-97fa-32ac80739e03\") " pod="swift-kuttl-tests/swift-ring-rebalance-542kn"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.232236 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/ce2db101-ee1a-401c-97fa-32ac80739e03-ring-data-devices\") pod \"swift-ring-rebalance-542kn\" (UID: \"ce2db101-ee1a-401c-97fa-32ac80739e03\") " pod="swift-kuttl-tests/swift-ring-rebalance-542kn"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.232256 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/ce2db101-ee1a-401c-97fa-32ac80739e03-swiftconf\") pod \"swift-ring-rebalance-542kn\" (UID: \"ce2db101-ee1a-401c-97fa-32ac80739e03\") " pod="swift-kuttl-tests/swift-ring-rebalance-542kn"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.232312 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/ce2db101-ee1a-401c-97fa-32ac80739e03-dispersionconf\") pod \"swift-ring-rebalance-542kn\" (UID: \"ce2db101-ee1a-401c-97fa-32ac80739e03\") " pod="swift-kuttl-tests/swift-ring-rebalance-542kn"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.232410 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9w9l5\" (UniqueName: \"kubernetes.io/projected/ce2db101-ee1a-401c-97fa-32ac80739e03-kube-api-access-9w9l5\") pod \"swift-ring-rebalance-542kn\" (UID: \"ce2db101-ee1a-401c-97fa-32ac80739e03\") " pod="swift-kuttl-tests/swift-ring-rebalance-542kn"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.233770 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/ce2db101-ee1a-401c-97fa-32ac80739e03-etc-swift\") pod \"swift-ring-rebalance-542kn\" (UID: \"ce2db101-ee1a-401c-97fa-32ac80739e03\") " pod="swift-kuttl-tests/swift-ring-rebalance-542kn"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.234310 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/ce2db101-ee1a-401c-97fa-32ac80739e03-ring-data-devices\") pod \"swift-ring-rebalance-542kn\" (UID: \"ce2db101-ee1a-401c-97fa-32ac80739e03\") " pod="swift-kuttl-tests/swift-ring-rebalance-542kn"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.234674 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ce2db101-ee1a-401c-97fa-32ac80739e03-scripts\") pod \"swift-ring-rebalance-542kn\" (UID: \"ce2db101-ee1a-401c-97fa-32ac80739e03\") " pod="swift-kuttl-tests/swift-ring-rebalance-542kn"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.237451 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/ce2db101-ee1a-401c-97fa-32ac80739e03-dispersionconf\") pod \"swift-ring-rebalance-542kn\" (UID: \"ce2db101-ee1a-401c-97fa-32ac80739e03\") " pod="swift-kuttl-tests/swift-ring-rebalance-542kn"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.237842 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce2db101-ee1a-401c-97fa-32ac80739e03-combined-ca-bundle\") pod \"swift-ring-rebalance-542kn\" (UID: \"ce2db101-ee1a-401c-97fa-32ac80739e03\") " pod="swift-kuttl-tests/swift-ring-rebalance-542kn"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.244886 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/ce2db101-ee1a-401c-97fa-32ac80739e03-swiftconf\") pod \"swift-ring-rebalance-542kn\" (UID: \"ce2db101-ee1a-401c-97fa-32ac80739e03\") " pod="swift-kuttl-tests/swift-ring-rebalance-542kn"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.254483 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9w9l5\" (UniqueName: \"kubernetes.io/projected/ce2db101-ee1a-401c-97fa-32ac80739e03-kube-api-access-9w9l5\") pod \"swift-ring-rebalance-542kn\" (UID: \"ce2db101-ee1a-401c-97fa-32ac80739e03\") " pod="swift-kuttl-tests/swift-ring-rebalance-542kn"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.327802 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-542kn"
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.333672 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d446295a-b150-4b07-a2ea-cf0c1fb28553-etc-swift\") pod \"swift-storage-0\" (UID: \"d446295a-b150-4b07-a2ea-cf0c1fb28553\") " pod="swift-kuttl-tests/swift-storage-0"
Jan 26 11:06:20 crc kubenswrapper[5003]: E0126 11:06:20.333882 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found
Jan 26 11:06:20 crc kubenswrapper[5003]: E0126 11:06:20.333911 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found
Jan 26 11:06:20 crc kubenswrapper[5003]: E0126 11:06:20.333964 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d446295a-b150-4b07-a2ea-cf0c1fb28553-etc-swift podName:d446295a-b150-4b07-a2ea-cf0c1fb28553 nodeName:}" failed. No retries permitted until 2026-01-26 11:06:21.333945722 +0000 UTC m=+1396.875171283 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/d446295a-b150-4b07-a2ea-cf0c1fb28553-etc-swift") pod "swift-storage-0" (UID: "d446295a-b150-4b07-a2ea-cf0c1fb28553") : configmap "swift-ring-files" not found
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.638175 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-etc-swift\") pod \"swift-proxy-fdbc998f6-jxc44\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") " pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"
Jan 26 11:06:20 crc kubenswrapper[5003]: E0126 11:06:20.638645 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found
Jan 26 11:06:20 crc kubenswrapper[5003]: E0126 11:06:20.638776 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44: configmap "swift-ring-files" not found
Jan 26 11:06:20 crc kubenswrapper[5003]: E0126 11:06:20.638845 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-etc-swift podName:4e618c67-8bf8-4d28-b330-22d0cbd56cb5 nodeName:}" failed. No retries permitted until 2026-01-26 11:06:21.638823372 +0000 UTC m=+1397.180048933 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-etc-swift") pod "swift-proxy-fdbc998f6-jxc44" (UID: "4e618c67-8bf8-4d28-b330-22d0cbd56cb5") : configmap "swift-ring-files" not found
Jan 26 11:06:20 crc kubenswrapper[5003]: I0126 11:06:20.798055 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-542kn"]
Jan 26 11:06:21 crc kubenswrapper[5003]: I0126 11:06:21.348726 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d446295a-b150-4b07-a2ea-cf0c1fb28553-etc-swift\") pod \"swift-storage-0\" (UID: \"d446295a-b150-4b07-a2ea-cf0c1fb28553\") " pod="swift-kuttl-tests/swift-storage-0"
Jan 26 11:06:21 crc kubenswrapper[5003]: E0126 11:06:21.348959 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found
Jan 26 11:06:21 crc kubenswrapper[5003]: E0126 11:06:21.349207 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found
Jan 26 11:06:21 crc kubenswrapper[5003]: E0126 11:06:21.349268 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d446295a-b150-4b07-a2ea-cf0c1fb28553-etc-swift podName:d446295a-b150-4b07-a2ea-cf0c1fb28553 nodeName:}" failed. No retries permitted until 2026-01-26 11:06:23.349248 +0000 UTC m=+1398.890473561 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/d446295a-b150-4b07-a2ea-cf0c1fb28553-etc-swift") pod "swift-storage-0" (UID: "d446295a-b150-4b07-a2ea-cf0c1fb28553") : configmap "swift-ring-files" not found
Jan 26 11:06:21 crc kubenswrapper[5003]: I0126 11:06:21.591445 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-542kn" event={"ID":"ce2db101-ee1a-401c-97fa-32ac80739e03","Type":"ContainerStarted","Data":"e9911461a08addd9f13a08160257457889a9722b9819d3456cc4a25adbf59a47"}
Jan 26 11:06:21 crc kubenswrapper[5003]: I0126 11:06:21.591565 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-542kn" event={"ID":"ce2db101-ee1a-401c-97fa-32ac80739e03","Type":"ContainerStarted","Data":"3277c84850d6157ce05b80f32164180f9c1ca0da6abb21b212036aae4817eb1e"}
Jan 26 11:06:21 crc kubenswrapper[5003]: I0126 11:06:21.636802 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-ring-rebalance-542kn" podStartSLOduration=2.636775253 podStartE2EDuration="2.636775253s" podCreationTimestamp="2026-01-26 11:06:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 11:06:21.63039579 +0000 UTC m=+1397.171621371" watchObservedRunningTime="2026-01-26 11:06:21.636775253 +0000 UTC m=+1397.178000814"
Jan 26 11:06:21 crc kubenswrapper[5003]: I0126 11:06:21.653192 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-etc-swift\") pod \"swift-proxy-fdbc998f6-jxc44\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") " pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"
Jan 26 11:06:21 crc kubenswrapper[5003]: E0126 11:06:21.653461 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found
Jan 26 11:06:21 crc kubenswrapper[5003]: E0126 11:06:21.653506 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44: configmap "swift-ring-files" not found
Jan 26 11:06:21 crc kubenswrapper[5003]: E0126 11:06:21.653599 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-etc-swift podName:4e618c67-8bf8-4d28-b330-22d0cbd56cb5 nodeName:}" failed. No retries permitted until 2026-01-26 11:06:23.653578265 +0000 UTC m=+1399.194803826 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-etc-swift") pod "swift-proxy-fdbc998f6-jxc44" (UID: "4e618c67-8bf8-4d28-b330-22d0cbd56cb5") : configmap "swift-ring-files" not found
Jan 26 11:06:23 crc kubenswrapper[5003]: I0126 11:06:23.384047 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d446295a-b150-4b07-a2ea-cf0c1fb28553-etc-swift\") pod \"swift-storage-0\" (UID: \"d446295a-b150-4b07-a2ea-cf0c1fb28553\") " pod="swift-kuttl-tests/swift-storage-0"
Jan 26 11:06:23 crc kubenswrapper[5003]: E0126 11:06:23.384373 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found
Jan 26 11:06:23 crc kubenswrapper[5003]: E0126 11:06:23.384586 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found
Jan 26 11:06:23 crc kubenswrapper[5003]: E0126 11:06:23.384676 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d446295a-b150-4b07-a2ea-cf0c1fb28553-etc-swift podName:d446295a-b150-4b07-a2ea-cf0c1fb28553 nodeName:}" failed. No retries permitted until 2026-01-26 11:06:27.384645803 +0000 UTC m=+1402.925871404 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/d446295a-b150-4b07-a2ea-cf0c1fb28553-etc-swift") pod "swift-storage-0" (UID: "d446295a-b150-4b07-a2ea-cf0c1fb28553") : configmap "swift-ring-files" not found
Jan 26 11:06:23 crc kubenswrapper[5003]: I0126 11:06:23.689676 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-etc-swift\") pod \"swift-proxy-fdbc998f6-jxc44\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") " pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"
Jan 26 11:06:23 crc kubenswrapper[5003]: E0126 11:06:23.690009 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found
Jan 26 11:06:23 crc kubenswrapper[5003]: E0126 11:06:23.690060 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44: configmap "swift-ring-files" not found
Jan 26 11:06:23 crc kubenswrapper[5003]: E0126 11:06:23.690175 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-etc-swift podName:4e618c67-8bf8-4d28-b330-22d0cbd56cb5 nodeName:}" failed. No retries permitted until 2026-01-26 11:06:27.690140422 +0000 UTC m=+1403.231365993 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-etc-swift") pod "swift-proxy-fdbc998f6-jxc44" (UID: "4e618c67-8bf8-4d28-b330-22d0cbd56cb5") : configmap "swift-ring-files" not found
Jan 26 11:06:27 crc kubenswrapper[5003]: I0126 11:06:27.460226 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d446295a-b150-4b07-a2ea-cf0c1fb28553-etc-swift\") pod \"swift-storage-0\" (UID: \"d446295a-b150-4b07-a2ea-cf0c1fb28553\") " pod="swift-kuttl-tests/swift-storage-0"
Jan 26 11:06:27 crc kubenswrapper[5003]: E0126 11:06:27.460504 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found
Jan 26 11:06:27 crc kubenswrapper[5003]: E0126 11:06:27.460740 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found
Jan 26 11:06:27 crc kubenswrapper[5003]: E0126 11:06:27.460807 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d446295a-b150-4b07-a2ea-cf0c1fb28553-etc-swift podName:d446295a-b150-4b07-a2ea-cf0c1fb28553 nodeName:}" failed. No retries permitted until 2026-01-26 11:06:35.460784633 +0000 UTC m=+1411.002010265 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/d446295a-b150-4b07-a2ea-cf0c1fb28553-etc-swift") pod "swift-storage-0" (UID: "d446295a-b150-4b07-a2ea-cf0c1fb28553") : configmap "swift-ring-files" not found
Jan 26 11:06:27 crc kubenswrapper[5003]: I0126 11:06:27.765155 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-etc-swift\") pod \"swift-proxy-fdbc998f6-jxc44\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") " pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"
Jan 26 11:06:27 crc kubenswrapper[5003]: E0126 11:06:27.765310 5003 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found
Jan 26 11:06:27 crc kubenswrapper[5003]: E0126 11:06:27.765336 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44: configmap "swift-ring-files" not found
Jan 26 11:06:27 crc kubenswrapper[5003]: E0126 11:06:27.765399 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-etc-swift podName:4e618c67-8bf8-4d28-b330-22d0cbd56cb5 nodeName:}" failed. No retries permitted until 2026-01-26 11:06:35.765381086 +0000 UTC m=+1411.306606657 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-etc-swift") pod "swift-proxy-fdbc998f6-jxc44" (UID: "4e618c67-8bf8-4d28-b330-22d0cbd56cb5") : configmap "swift-ring-files" not found
Jan 26 11:06:28 crc kubenswrapper[5003]: I0126 11:06:28.660094 5003 generic.go:334] "Generic (PLEG): container finished" podID="ce2db101-ee1a-401c-97fa-32ac80739e03" containerID="e9911461a08addd9f13a08160257457889a9722b9819d3456cc4a25adbf59a47" exitCode=0
Jan 26 11:06:28 crc kubenswrapper[5003]: I0126 11:06:28.660408 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-542kn" event={"ID":"ce2db101-ee1a-401c-97fa-32ac80739e03","Type":"ContainerDied","Data":"e9911461a08addd9f13a08160257457889a9722b9819d3456cc4a25adbf59a47"}
Jan 26 11:06:29 crc kubenswrapper[5003]: I0126 11:06:29.963863 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-542kn"
Jan 26 11:06:30 crc kubenswrapper[5003]: I0126 11:06:30.111192 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9w9l5\" (UniqueName: \"kubernetes.io/projected/ce2db101-ee1a-401c-97fa-32ac80739e03-kube-api-access-9w9l5\") pod \"ce2db101-ee1a-401c-97fa-32ac80739e03\" (UID: \"ce2db101-ee1a-401c-97fa-32ac80739e03\") "
Jan 26 11:06:30 crc kubenswrapper[5003]: I0126 11:06:30.111249 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/ce2db101-ee1a-401c-97fa-32ac80739e03-ring-data-devices\") pod \"ce2db101-ee1a-401c-97fa-32ac80739e03\" (UID: \"ce2db101-ee1a-401c-97fa-32ac80739e03\") "
Jan 26 11:06:30 crc kubenswrapper[5003]: I0126 11:06:30.111302 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce2db101-ee1a-401c-97fa-32ac80739e03-combined-ca-bundle\") pod \"ce2db101-ee1a-401c-97fa-32ac80739e03\" (UID: \"ce2db101-ee1a-401c-97fa-32ac80739e03\") "
Jan 26 11:06:30 crc kubenswrapper[5003]: I0126 11:06:30.111354 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/ce2db101-ee1a-401c-97fa-32ac80739e03-dispersionconf\") pod \"ce2db101-ee1a-401c-97fa-32ac80739e03\" (UID: \"ce2db101-ee1a-401c-97fa-32ac80739e03\") "
Jan 26 11:06:30 crc kubenswrapper[5003]: I0126 11:06:30.111390 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/ce2db101-ee1a-401c-97fa-32ac80739e03-etc-swift\") pod \"ce2db101-ee1a-401c-97fa-32ac80739e03\" (UID: \"ce2db101-ee1a-401c-97fa-32ac80739e03\") "
Jan 26 11:06:30 crc kubenswrapper[5003]: I0126 11:06:30.111422 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ce2db101-ee1a-401c-97fa-32ac80739e03-scripts\") pod \"ce2db101-ee1a-401c-97fa-32ac80739e03\" (UID: \"ce2db101-ee1a-401c-97fa-32ac80739e03\") "
Jan 26 11:06:30 crc kubenswrapper[5003]: I0126 11:06:30.111476 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/ce2db101-ee1a-401c-97fa-32ac80739e03-swiftconf\") pod \"ce2db101-ee1a-401c-97fa-32ac80739e03\" (UID: \"ce2db101-ee1a-401c-97fa-32ac80739e03\") "
Jan 26 11:06:30 crc kubenswrapper[5003]: I0126 11:06:30.112662 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce2db101-ee1a-401c-97fa-32ac80739e03-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "ce2db101-ee1a-401c-97fa-32ac80739e03" (UID: "ce2db101-ee1a-401c-97fa-32ac80739e03"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 11:06:30 crc kubenswrapper[5003]: I0126 11:06:30.112903 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ce2db101-ee1a-401c-97fa-32ac80739e03-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "ce2db101-ee1a-401c-97fa-32ac80739e03" (UID: "ce2db101-ee1a-401c-97fa-32ac80739e03"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 11:06:30 crc kubenswrapper[5003]: I0126 11:06:30.121782 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce2db101-ee1a-401c-97fa-32ac80739e03-kube-api-access-9w9l5" (OuterVolumeSpecName: "kube-api-access-9w9l5") pod "ce2db101-ee1a-401c-97fa-32ac80739e03" (UID: "ce2db101-ee1a-401c-97fa-32ac80739e03"). InnerVolumeSpecName "kube-api-access-9w9l5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 11:06:30 crc kubenswrapper[5003]: I0126 11:06:30.133200 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce2db101-ee1a-401c-97fa-32ac80739e03-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ce2db101-ee1a-401c-97fa-32ac80739e03" (UID: "ce2db101-ee1a-401c-97fa-32ac80739e03"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 11:06:30 crc kubenswrapper[5003]: I0126 11:06:30.133693 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce2db101-ee1a-401c-97fa-32ac80739e03-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "ce2db101-ee1a-401c-97fa-32ac80739e03" (UID: "ce2db101-ee1a-401c-97fa-32ac80739e03"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 11:06:30 crc kubenswrapper[5003]: I0126 11:06:30.137863 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ce2db101-ee1a-401c-97fa-32ac80739e03-scripts" (OuterVolumeSpecName: "scripts") pod "ce2db101-ee1a-401c-97fa-32ac80739e03" (UID: "ce2db101-ee1a-401c-97fa-32ac80739e03"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 11:06:30 crc kubenswrapper[5003]: I0126 11:06:30.142988 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce2db101-ee1a-401c-97fa-32ac80739e03-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "ce2db101-ee1a-401c-97fa-32ac80739e03" (UID: "ce2db101-ee1a-401c-97fa-32ac80739e03"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 11:06:30 crc kubenswrapper[5003]: I0126 11:06:30.212897 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9w9l5\" (UniqueName: \"kubernetes.io/projected/ce2db101-ee1a-401c-97fa-32ac80739e03-kube-api-access-9w9l5\") on node \"crc\" DevicePath \"\""
Jan 26 11:06:30 crc kubenswrapper[5003]: I0126 11:06:30.212945 5003 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/ce2db101-ee1a-401c-97fa-32ac80739e03-ring-data-devices\") on node \"crc\" DevicePath \"\""
Jan 26 11:06:30 crc kubenswrapper[5003]: I0126 11:06:30.212954 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce2db101-ee1a-401c-97fa-32ac80739e03-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 26 11:06:30 crc kubenswrapper[5003]: I0126 11:06:30.212966 5003 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/ce2db101-ee1a-401c-97fa-32ac80739e03-dispersionconf\") on node \"crc\" DevicePath \"\""
Jan 26 11:06:30 crc kubenswrapper[5003]: I0126 11:06:30.212977 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/ce2db101-ee1a-401c-97fa-32ac80739e03-etc-swift\") on node \"crc\" DevicePath \"\""
Jan 26 11:06:30 crc kubenswrapper[5003]: I0126 11:06:30.212985 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ce2db101-ee1a-401c-97fa-32ac80739e03-scripts\") on node \"crc\" DevicePath \"\""
Jan 26 11:06:30 crc kubenswrapper[5003]: I0126 11:06:30.212993 5003 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/ce2db101-ee1a-401c-97fa-32ac80739e03-swiftconf\") on node \"crc\" DevicePath \"\""
Jan 26 11:06:30 crc kubenswrapper[5003]: I0126 11:06:30.678346 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-542kn" event={"ID":"ce2db101-ee1a-401c-97fa-32ac80739e03","Type":"ContainerDied","Data":"3277c84850d6157ce05b80f32164180f9c1ca0da6abb21b212036aae4817eb1e"}
Jan 26 11:06:30 crc kubenswrapper[5003]: I0126 11:06:30.678397 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3277c84850d6157ce05b80f32164180f9c1ca0da6abb21b212036aae4817eb1e"
Jan 26 11:06:30 crc kubenswrapper[5003]: I0126 11:06:30.678457 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-542kn"
Jan 26 11:06:35 crc kubenswrapper[5003]: I0126 11:06:35.488403 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d446295a-b150-4b07-a2ea-cf0c1fb28553-etc-swift\") pod \"swift-storage-0\" (UID: \"d446295a-b150-4b07-a2ea-cf0c1fb28553\") " pod="swift-kuttl-tests/swift-storage-0"
Jan 26 11:06:35 crc kubenswrapper[5003]: I0126 11:06:35.503607 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d446295a-b150-4b07-a2ea-cf0c1fb28553-etc-swift\") pod \"swift-storage-0\" (UID: \"d446295a-b150-4b07-a2ea-cf0c1fb28553\") " pod="swift-kuttl-tests/swift-storage-0"
Jan 26 11:06:35 crc kubenswrapper[5003]: I0126 11:06:35.517362 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0"
Jan 26 11:06:35 crc kubenswrapper[5003]: I0126 11:06:35.793131 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-etc-swift\") pod \"swift-proxy-fdbc998f6-jxc44\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") " pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"
Jan 26 11:06:35 crc kubenswrapper[5003]: I0126 11:06:35.802035 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-etc-swift\") pod \"swift-proxy-fdbc998f6-jxc44\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") " pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"
Jan 26 11:06:35 crc kubenswrapper[5003]: I0126 11:06:35.820251 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"
Jan 26 11:06:35 crc kubenswrapper[5003]: I0126 11:06:35.999413 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-0"]
Jan 26 11:06:36 crc kubenswrapper[5003]: W0126 11:06:36.286815 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4e618c67_8bf8_4d28_b330_22d0cbd56cb5.slice/crio-24106c5ef05b7072d56e2a98cf25be4744f6043b10b66c6b659e8370398db302 WatchSource:0}: Error finding container 24106c5ef05b7072d56e2a98cf25be4744f6043b10b66c6b659e8370398db302: Status 404 returned error can't find the container with id 24106c5ef05b7072d56e2a98cf25be4744f6043b10b66c6b659e8370398db302
Jan 26 11:06:36 crc kubenswrapper[5003]: I0126 11:06:36.287233 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"]
Jan 26 11:06:36 crc kubenswrapper[5003]: I0126 11:06:36.722343 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44" event={"ID":"4e618c67-8bf8-4d28-b330-22d0cbd56cb5","Type":"ContainerStarted","Data":"27d67f3abd597ec5646bf578b9346abc8bfefa31d1d072980bbefcc9b85aebd7"}
Jan 26 11:06:36 crc kubenswrapper[5003]: I0126 11:06:36.722398 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44" event={"ID":"4e618c67-8bf8-4d28-b330-22d0cbd56cb5","Type":"ContainerStarted","Data":"24106c5ef05b7072d56e2a98cf25be4744f6043b10b66c6b659e8370398db302"}
Jan 26 11:06:36 crc kubenswrapper[5003]: I0126 11:06:36.725537 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"d446295a-b150-4b07-a2ea-cf0c1fb28553","Type":"ContainerStarted","Data":"5eaf8fd7a9541d6ecfd0db15ef13773e6e2560b30dc15a6f5186fbbc0110ba52"}
Jan 26 11:06:36 crc kubenswrapper[5003]: I0126 11:06:36.725637 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"d446295a-b150-4b07-a2ea-cf0c1fb28553","Type":"ContainerStarted","Data":"24eab1403956f1736a6551ce380aad9e6271e0fd4f6826b5076463e24266977f"}
Jan 26 11:06:36 crc kubenswrapper[5003]: I0126 11:06:36.725696 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"d446295a-b150-4b07-a2ea-cf0c1fb28553","Type":"ContainerStarted","Data":"d7fb6878a92ee87d7db265eadfae05b3d0985acd163e665320512d839d610a3f"}
Jan 26 11:06:36 crc kubenswrapper[5003]: I0126 11:06:36.725763 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"d446295a-b150-4b07-a2ea-cf0c1fb28553","Type":"ContainerStarted","Data":"cfabb113453ca4f73be92a3981601ac100dac3ab00bc4edb6811889fd3aff51f"}
Jan 26 11:06:37 crc kubenswrapper[5003]: I0126 11:06:37.734703 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44" event={"ID":"4e618c67-8bf8-4d28-b330-22d0cbd56cb5","Type":"ContainerStarted","Data":"2159b0b1460b770f46df9502eade2b644d2c9a7bfd5347fe14f40481d1180f47"}
Jan 26 11:06:37 crc kubenswrapper[5003]: I0126 11:06:37.735237 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"
Jan 26 11:06:37 crc kubenswrapper[5003]: I0126 11:06:37.735263 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"
Jan 26 11:06:37 crc kubenswrapper[5003]: I0126 11:06:37.749947 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"d446295a-b150-4b07-a2ea-cf0c1fb28553","Type":"ContainerStarted","Data":"dd9212d7099a51e074a1a2848caa2609023fe3642ce7bc2ea60b12678db15303"}
Jan 26 11:06:37 crc kubenswrapper[5003]: I0126 11:06:37.749989 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"d446295a-b150-4b07-a2ea-cf0c1fb28553","Type":"ContainerStarted","Data":"2762f3e7b5eeaffe30706cd14ed8eeace62d4b088b74d37c258fd690c22363a4"}
Jan 26 11:06:37 crc kubenswrapper[5003]: I0126 11:06:37.749997 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"d446295a-b150-4b07-a2ea-cf0c1fb28553","Type":"ContainerStarted","Data":"09c77f38af2391a61d05a4f86f8d38370c6798f914310c787187c33e9665820f"}
Jan 26 11:06:37 crc kubenswrapper[5003]: I0126 11:06:37.750007 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"d446295a-b150-4b07-a2ea-cf0c1fb28553","Type":"ContainerStarted","Data":"4c32f17ff700698e9f6dc94973c47fc1e273b44494b6a3427b8b079646256ced"}
Jan 26 11:06:37 crc kubenswrapper[5003]: I0126 11:06:37.750018 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"d446295a-b150-4b07-a2ea-cf0c1fb28553","Type":"ContainerStarted","Data":"42f325d7b84c82e4f613405b4c7f0319f3de962a3658069d862d91a3503882d9"}
Jan 26 11:06:37 crc kubenswrapper[5003]: I0126 11:06:37.750029 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"d446295a-b150-4b07-a2ea-cf0c1fb28553","Type":"ContainerStarted","Data":"0770c050ea8a958bb547dcb3d5c14051dcb8b1ccd15bfafed31c3f865fefdbbd"}
Jan 26 11:06:37 crc kubenswrapper[5003]: I0126 11:06:37.750040 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"d446295a-b150-4b07-a2ea-cf0c1fb28553","Type":"ContainerStarted","Data":"44b815891ac54b4652e96219b9c9b5d317eba00737b48864dd7b056ddb2b38d7"}
Jan 26 11:06:37 crc kubenswrapper[5003]: I0126 11:06:37.762034 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44" podStartSLOduration=18.762014113 podStartE2EDuration="18.762014113s" podCreationTimestamp="2026-01-26 11:06:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 11:06:37.75840662 +0000 UTC m=+1413.299632181" watchObservedRunningTime="2026-01-26 11:06:37.762014113 +0000 UTC m=+1413.303239684"
Jan 26 11:06:38 crc kubenswrapper[5003]: I0126 11:06:38.767843 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"d446295a-b150-4b07-a2ea-cf0c1fb28553","Type":"ContainerStarted","Data":"4907090f0c08ce892242dc6da0c571bfff88a65efbea27edf6c9bf1e7a8726e2"}
Jan 26 11:06:38 crc kubenswrapper[5003]: I0126 11:06:38.768145 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"d446295a-b150-4b07-a2ea-cf0c1fb28553","Type":"ContainerStarted","Data":"40b32c6fc232474ff05130b599d33628d15d5395bc63c6a8e36c395a5987dbf7"}
Jan 26 11:06:38 crc kubenswrapper[5003]: I0126 11:06:38.768163 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"d446295a-b150-4b07-a2ea-cf0c1fb28553","Type":"ContainerStarted","Data":"d65b34100918b96132397a5eee7049d097c8c2b314d41f577fb2fea7f465e561"}
Jan 26 11:06:38 crc kubenswrapper[5003]: I0126 11:06:38.768176 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"d446295a-b150-4b07-a2ea-cf0c1fb28553","Type":"ContainerStarted","Data":"af0a0d209d83ae052e0b10fafada744ffd209e151109e3d6280db46737d11aa0"}
Jan 26 11:06:38 crc kubenswrapper[5003]: I0126 11:06:38.768214 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"d446295a-b150-4b07-a2ea-cf0c1fb28553","Type":"ContainerStarted","Data":"2d3bb1e14fc5fa885525112e4de1e42cb1b952ff3d6d294295aca57f2c065bde"}
Jan 26 11:06:38 crc kubenswrapper[5003]: I0126 11:06:38.815451 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-storage-0" podStartSLOduration=20.815432044 podStartE2EDuration="20.815432044s" podCreationTimestamp="2026-01-26 11:06:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 11:06:38.807207998 +0000 UTC m=+1414.348433579" watchObservedRunningTime="2026-01-26 11:06:38.815432044 +0000 UTC m=+1414.356657595"
Jan 26 11:06:45 crc kubenswrapper[5003]: I0126 11:06:45.826220 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"
Jan 26 11:06:45 crc kubenswrapper[5003]: I0126 11:06:45.831698 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"
Jan 26 11:06:47 crc kubenswrapper[5003]: I0126 11:06:47.774904 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-0"]
Jan 26 11:06:47 crc kubenswrapper[5003]: I0126 11:06:47.775687 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="account-server" containerID="cri-o://d7fb6878a92ee87d7db265eadfae05b3d0985acd163e665320512d839d610a3f" gracePeriod=30
Jan 26 11:06:47 crc kubenswrapper[5003]: I0126 11:06:47.775799 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="object-server" containerID="cri-o://2762f3e7b5eeaffe30706cd14ed8eeace62d4b088b74d37c258fd690c22363a4" gracePeriod=30
Jan 26 11:06:47 crc kubenswrapper[5003]: I0126 11:06:47.775839 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="swift-recon-cron" containerID="cri-o://4907090f0c08ce892242dc6da0c571bfff88a65efbea27edf6c9bf1e7a8726e2" gracePeriod=30
Jan 26 11:06:47 crc kubenswrapper[5003]: I0126 11:06:47.775938 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="object-updater" containerID="cri-o://af0a0d209d83ae052e0b10fafada744ffd209e151109e3d6280db46737d11aa0" gracePeriod=30
Jan 26 11:06:47 crc kubenswrapper[5003]: I0126 11:06:47.775889 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="object-expirer" containerID="cri-o://d65b34100918b96132397a5eee7049d097c8c2b314d41f577fb2fea7f465e561" gracePeriod=30
Jan 26 11:06:47 crc kubenswrapper[5003]: I0126 11:06:47.776010 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="container-server" containerID="cri-o://0770c050ea8a958bb547dcb3d5c14051dcb8b1ccd15bfafed31c3f865fefdbbd" gracePeriod=30
Jan 26 11:06:47 crc kubenswrapper[5003]: I0126 11:06:47.775994 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="object-replicator" containerID="cri-o://dd9212d7099a51e074a1a2848caa2609023fe3642ce7bc2ea60b12678db15303" gracePeriod=30
Jan 26 11:06:47 crc kubenswrapper[5003]: I0126 11:06:47.776067 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="container-replicator" containerID="cri-o://42f325d7b84c82e4f613405b4c7f0319f3de962a3658069d862d91a3503882d9" gracePeriod=30
Jan 26 11:06:47 crc kubenswrapper[5003]: I0126 11:06:47.775983 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="object-auditor" containerID="cri-o://2d3bb1e14fc5fa885525112e4de1e42cb1b952ff3d6d294295aca57f2c065bde" gracePeriod=30
Jan 26 11:06:47 crc kubenswrapper[5003]: I0126 11:06:47.776133 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="account-reaper" containerID="cri-o://44b815891ac54b4652e96219b9c9b5d317eba00737b48864dd7b056ddb2b38d7" gracePeriod=30
Jan 26 11:06:47 crc kubenswrapper[5003]: I0126 11:06:47.776152 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="container-updater" containerID="cri-o://09c77f38af2391a61d05a4f86f8d38370c6798f914310c787187c33e9665820f" gracePeriod=30
Jan 26 11:06:47 crc kubenswrapper[5003]: I0126 11:06:47.776099 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="container-auditor" containerID="cri-o://4c32f17ff700698e9f6dc94973c47fc1e273b44494b6a3427b8b079646256ced" gracePeriod=30
Jan 26 11:06:47 crc kubenswrapper[5003]: I0126 11:06:47.776175 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="rsync" containerID="cri-o://40b32c6fc232474ff05130b599d33628d15d5395bc63c6a8e36c395a5987dbf7" gracePeriod=30
Jan 26 11:06:47 crc kubenswrapper[5003]: I0126 11:06:47.776119 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="account-auditor" containerID="cri-o://5eaf8fd7a9541d6ecfd0db15ef13773e6e2560b30dc15a6f5186fbbc0110ba52" gracePeriod=30
Jan 26 11:06:47 crc kubenswrapper[5003]: I0126 11:06:47.776202 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="account-replicator" containerID="cri-o://24eab1403956f1736a6551ce380aad9e6271e0fd4f6826b5076463e24266977f" gracePeriod=30
Jan 26 11:06:47 crc kubenswrapper[5003]: I0126 11:06:47.799621 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-542kn"]
Jan 26 11:06:47 crc kubenswrapper[5003]: I0126 11:06:47.812628 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-542kn"]
Jan 26 11:06:47 crc kubenswrapper[5003]: I0126 11:06:47.843709 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"]
Jan 26 11:06:47 crc kubenswrapper[5003]: I0126 11:06:47.852678 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44" podUID="4e618c67-8bf8-4d28-b330-22d0cbd56cb5" containerName="proxy-httpd" containerID="cri-o://27d67f3abd597ec5646bf578b9346abc8bfefa31d1d072980bbefcc9b85aebd7" gracePeriod=30
Jan 26 11:06:47 crc kubenswrapper[5003]: I0126 11:06:47.852726 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44" podUID="4e618c67-8bf8-4d28-b330-22d0cbd56cb5" containerName="proxy-server" containerID="cri-o://2159b0b1460b770f46df9502eade2b644d2c9a7bfd5347fe14f40481d1180f47" gracePeriod=30
Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.729704 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"
Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.753745 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-config-data\") pod \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") "
Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.753833 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-public-tls-certs\") pod \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") "
Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.753889 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-run-httpd\") pod \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") "
Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.753909 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-combined-ca-bundle\") pod \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") "
Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.753937 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-etc-swift\") pod \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") "
Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.754047 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-internal-tls-certs\") pod \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") "
Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.754108 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-log-httpd\") pod \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") "
Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.754160 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gzhv8\" (UniqueName: \"kubernetes.io/projected/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-kube-api-access-gzhv8\") pod \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\" (UID: \"4e618c67-8bf8-4d28-b330-22d0cbd56cb5\") "
Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.754995 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "4e618c67-8bf8-4d28-b330-22d0cbd56cb5" (UID: "4e618c67-8bf8-4d28-b330-22d0cbd56cb5"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.755245 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "4e618c67-8bf8-4d28-b330-22d0cbd56cb5" (UID: "4e618c67-8bf8-4d28-b330-22d0cbd56cb5"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.764077 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-kube-api-access-gzhv8" (OuterVolumeSpecName: "kube-api-access-gzhv8") pod "4e618c67-8bf8-4d28-b330-22d0cbd56cb5" (UID: "4e618c67-8bf8-4d28-b330-22d0cbd56cb5"). InnerVolumeSpecName "kube-api-access-gzhv8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.764916 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "4e618c67-8bf8-4d28-b330-22d0cbd56cb5" (UID: "4e618c67-8bf8-4d28-b330-22d0cbd56cb5"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.802518 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4e618c67-8bf8-4d28-b330-22d0cbd56cb5" (UID: "4e618c67-8bf8-4d28-b330-22d0cbd56cb5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.804836 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-config-data" (OuterVolumeSpecName: "config-data") pod "4e618c67-8bf8-4d28-b330-22d0cbd56cb5" (UID: "4e618c67-8bf8-4d28-b330-22d0cbd56cb5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.812512 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "4e618c67-8bf8-4d28-b330-22d0cbd56cb5" (UID: "4e618c67-8bf8-4d28-b330-22d0cbd56cb5"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.815723 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "4e618c67-8bf8-4d28-b330-22d0cbd56cb5" (UID: "4e618c67-8bf8-4d28-b330-22d0cbd56cb5"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.856368 5003 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.856411 5003 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-log-httpd\") on node \"crc\" DevicePath \"\""
Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.856424 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gzhv8\" (UniqueName: \"kubernetes.io/projected/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-kube-api-access-gzhv8\") on node \"crc\" DevicePath \"\""
Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.856440 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-config-data\") on node \"crc\" DevicePath \"\""
Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.856455 5003 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-public-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.856470 5003 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-run-httpd\") on node \"crc\" DevicePath \"\""
Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.856487 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.856504 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4e618c67-8bf8-4d28-b330-22d0cbd56cb5-etc-swift\") on node \"crc\" DevicePath \"\""
Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.867682 5003 generic.go:334] "Generic (PLEG): container finished" podID="4e618c67-8bf8-4d28-b330-22d0cbd56cb5" containerID="2159b0b1460b770f46df9502eade2b644d2c9a7bfd5347fe14f40481d1180f47" exitCode=0
Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.867734 5003 generic.go:334] "Generic (PLEG): container finished" podID="4e618c67-8bf8-4d28-b330-22d0cbd56cb5" containerID="27d67f3abd597ec5646bf578b9346abc8bfefa31d1d072980bbefcc9b85aebd7" exitCode=0
Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.867755 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44" event={"ID":"4e618c67-8bf8-4d28-b330-22d0cbd56cb5","Type":"ContainerDied","Data":"2159b0b1460b770f46df9502eade2b644d2c9a7bfd5347fe14f40481d1180f47"}
Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.867784 5003 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44" Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.867815 5003 scope.go:117] "RemoveContainer" containerID="2159b0b1460b770f46df9502eade2b644d2c9a7bfd5347fe14f40481d1180f47" Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.867798 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44" event={"ID":"4e618c67-8bf8-4d28-b330-22d0cbd56cb5","Type":"ContainerDied","Data":"27d67f3abd597ec5646bf578b9346abc8bfefa31d1d072980bbefcc9b85aebd7"} Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.867910 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44" event={"ID":"4e618c67-8bf8-4d28-b330-22d0cbd56cb5","Type":"ContainerDied","Data":"24106c5ef05b7072d56e2a98cf25be4744f6043b10b66c6b659e8370398db302"} Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.876371 5003 generic.go:334] "Generic (PLEG): container finished" podID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerID="40b32c6fc232474ff05130b599d33628d15d5395bc63c6a8e36c395a5987dbf7" exitCode=0 Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.876409 5003 generic.go:334] "Generic (PLEG): container finished" podID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerID="d65b34100918b96132397a5eee7049d097c8c2b314d41f577fb2fea7f465e561" exitCode=0 Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.876410 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"d446295a-b150-4b07-a2ea-cf0c1fb28553","Type":"ContainerDied","Data":"40b32c6fc232474ff05130b599d33628d15d5395bc63c6a8e36c395a5987dbf7"} Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.876468 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"d446295a-b150-4b07-a2ea-cf0c1fb28553","Type":"ContainerDied","Data":"d65b34100918b96132397a5eee7049d097c8c2b314d41f577fb2fea7f465e561"} Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.876493 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"d446295a-b150-4b07-a2ea-cf0c1fb28553","Type":"ContainerDied","Data":"af0a0d209d83ae052e0b10fafada744ffd209e151109e3d6280db46737d11aa0"} Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.876422 5003 generic.go:334] "Generic (PLEG): container finished" podID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerID="af0a0d209d83ae052e0b10fafada744ffd209e151109e3d6280db46737d11aa0" exitCode=0 Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.876517 5003 generic.go:334] "Generic (PLEG): container finished" podID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerID="2d3bb1e14fc5fa885525112e4de1e42cb1b952ff3d6d294295aca57f2c065bde" exitCode=0 Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.876529 5003 generic.go:334] "Generic (PLEG): container finished" podID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerID="dd9212d7099a51e074a1a2848caa2609023fe3642ce7bc2ea60b12678db15303" exitCode=0 Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.876536 5003 generic.go:334] "Generic (PLEG): container finished" podID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerID="2762f3e7b5eeaffe30706cd14ed8eeace62d4b088b74d37c258fd690c22363a4" exitCode=0 Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.876544 5003 generic.go:334] "Generic (PLEG): container finished" podID="d446295a-b150-4b07-a2ea-cf0c1fb28553" 
containerID="09c77f38af2391a61d05a4f86f8d38370c6798f914310c787187c33e9665820f" exitCode=0 Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.876552 5003 generic.go:334] "Generic (PLEG): container finished" podID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerID="4c32f17ff700698e9f6dc94973c47fc1e273b44494b6a3427b8b079646256ced" exitCode=0 Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.876560 5003 generic.go:334] "Generic (PLEG): container finished" podID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerID="42f325d7b84c82e4f613405b4c7f0319f3de962a3658069d862d91a3503882d9" exitCode=0 Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.876567 5003 generic.go:334] "Generic (PLEG): container finished" podID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerID="0770c050ea8a958bb547dcb3d5c14051dcb8b1ccd15bfafed31c3f865fefdbbd" exitCode=0 Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.876576 5003 generic.go:334] "Generic (PLEG): container finished" podID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerID="44b815891ac54b4652e96219b9c9b5d317eba00737b48864dd7b056ddb2b38d7" exitCode=0 Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.876585 5003 generic.go:334] "Generic (PLEG): container finished" podID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerID="5eaf8fd7a9541d6ecfd0db15ef13773e6e2560b30dc15a6f5186fbbc0110ba52" exitCode=0 Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.876593 5003 generic.go:334] "Generic (PLEG): container finished" podID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerID="24eab1403956f1736a6551ce380aad9e6271e0fd4f6826b5076463e24266977f" exitCode=0 Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.876604 5003 generic.go:334] "Generic (PLEG): container finished" podID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerID="d7fb6878a92ee87d7db265eadfae05b3d0985acd163e665320512d839d610a3f" exitCode=0 Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.876605 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"d446295a-b150-4b07-a2ea-cf0c1fb28553","Type":"ContainerDied","Data":"2d3bb1e14fc5fa885525112e4de1e42cb1b952ff3d6d294295aca57f2c065bde"} Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.876629 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"d446295a-b150-4b07-a2ea-cf0c1fb28553","Type":"ContainerDied","Data":"dd9212d7099a51e074a1a2848caa2609023fe3642ce7bc2ea60b12678db15303"} Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.876643 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"d446295a-b150-4b07-a2ea-cf0c1fb28553","Type":"ContainerDied","Data":"2762f3e7b5eeaffe30706cd14ed8eeace62d4b088b74d37c258fd690c22363a4"} Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.876655 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"d446295a-b150-4b07-a2ea-cf0c1fb28553","Type":"ContainerDied","Data":"09c77f38af2391a61d05a4f86f8d38370c6798f914310c787187c33e9665820f"} Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.876666 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"d446295a-b150-4b07-a2ea-cf0c1fb28553","Type":"ContainerDied","Data":"4c32f17ff700698e9f6dc94973c47fc1e273b44494b6a3427b8b079646256ced"} Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.876678 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="swift-kuttl-tests/swift-storage-0" event={"ID":"d446295a-b150-4b07-a2ea-cf0c1fb28553","Type":"ContainerDied","Data":"42f325d7b84c82e4f613405b4c7f0319f3de962a3658069d862d91a3503882d9"} Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.876691 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"d446295a-b150-4b07-a2ea-cf0c1fb28553","Type":"ContainerDied","Data":"0770c050ea8a958bb547dcb3d5c14051dcb8b1ccd15bfafed31c3f865fefdbbd"} Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.876703 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"d446295a-b150-4b07-a2ea-cf0c1fb28553","Type":"ContainerDied","Data":"44b815891ac54b4652e96219b9c9b5d317eba00737b48864dd7b056ddb2b38d7"} Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.876715 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"d446295a-b150-4b07-a2ea-cf0c1fb28553","Type":"ContainerDied","Data":"5eaf8fd7a9541d6ecfd0db15ef13773e6e2560b30dc15a6f5186fbbc0110ba52"} Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.876729 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"d446295a-b150-4b07-a2ea-cf0c1fb28553","Type":"ContainerDied","Data":"24eab1403956f1736a6551ce380aad9e6271e0fd4f6826b5076463e24266977f"} Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.876741 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"d446295a-b150-4b07-a2ea-cf0c1fb28553","Type":"ContainerDied","Data":"d7fb6878a92ee87d7db265eadfae05b3d0985acd163e665320512d839d610a3f"} Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.899855 5003 scope.go:117] "RemoveContainer" containerID="27d67f3abd597ec5646bf578b9346abc8bfefa31d1d072980bbefcc9b85aebd7" Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.958803 5003 scope.go:117] "RemoveContainer" containerID="2159b0b1460b770f46df9502eade2b644d2c9a7bfd5347fe14f40481d1180f47" Jan 26 11:06:48 crc kubenswrapper[5003]: E0126 11:06:48.959492 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2159b0b1460b770f46df9502eade2b644d2c9a7bfd5347fe14f40481d1180f47\": container with ID starting with 2159b0b1460b770f46df9502eade2b644d2c9a7bfd5347fe14f40481d1180f47 not found: ID does not exist" containerID="2159b0b1460b770f46df9502eade2b644d2c9a7bfd5347fe14f40481d1180f47" Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.959536 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2159b0b1460b770f46df9502eade2b644d2c9a7bfd5347fe14f40481d1180f47"} err="failed to get container status \"2159b0b1460b770f46df9502eade2b644d2c9a7bfd5347fe14f40481d1180f47\": rpc error: code = NotFound desc = could not find container \"2159b0b1460b770f46df9502eade2b644d2c9a7bfd5347fe14f40481d1180f47\": container with ID starting with 2159b0b1460b770f46df9502eade2b644d2c9a7bfd5347fe14f40481d1180f47 not found: ID does not exist" Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.959560 5003 scope.go:117] "RemoveContainer" containerID="27d67f3abd597ec5646bf578b9346abc8bfefa31d1d072980bbefcc9b85aebd7" Jan 26 11:06:48 crc kubenswrapper[5003]: E0126 11:06:48.960462 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"27d67f3abd597ec5646bf578b9346abc8bfefa31d1d072980bbefcc9b85aebd7\": container with ID starting with 27d67f3abd597ec5646bf578b9346abc8bfefa31d1d072980bbefcc9b85aebd7 not found: ID does not exist" containerID="27d67f3abd597ec5646bf578b9346abc8bfefa31d1d072980bbefcc9b85aebd7" Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.960501 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27d67f3abd597ec5646bf578b9346abc8bfefa31d1d072980bbefcc9b85aebd7"} err="failed to get container status \"27d67f3abd597ec5646bf578b9346abc8bfefa31d1d072980bbefcc9b85aebd7\": rpc error: code = NotFound desc = could not find container \"27d67f3abd597ec5646bf578b9346abc8bfefa31d1d072980bbefcc9b85aebd7\": container with ID starting with 27d67f3abd597ec5646bf578b9346abc8bfefa31d1d072980bbefcc9b85aebd7 not found: ID does not exist" Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.960520 5003 scope.go:117] "RemoveContainer" containerID="2159b0b1460b770f46df9502eade2b644d2c9a7bfd5347fe14f40481d1180f47" Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.960809 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2159b0b1460b770f46df9502eade2b644d2c9a7bfd5347fe14f40481d1180f47"} err="failed to get container status \"2159b0b1460b770f46df9502eade2b644d2c9a7bfd5347fe14f40481d1180f47\": rpc error: code = NotFound desc = could not find container \"2159b0b1460b770f46df9502eade2b644d2c9a7bfd5347fe14f40481d1180f47\": container with ID starting with 2159b0b1460b770f46df9502eade2b644d2c9a7bfd5347fe14f40481d1180f47 not found: ID does not exist" Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.960836 5003 scope.go:117] "RemoveContainer" containerID="27d67f3abd597ec5646bf578b9346abc8bfefa31d1d072980bbefcc9b85aebd7" Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.962397 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27d67f3abd597ec5646bf578b9346abc8bfefa31d1d072980bbefcc9b85aebd7"} err="failed to get container status \"27d67f3abd597ec5646bf578b9346abc8bfefa31d1d072980bbefcc9b85aebd7\": rpc error: code = NotFound desc = could not find container \"27d67f3abd597ec5646bf578b9346abc8bfefa31d1d072980bbefcc9b85aebd7\": container with ID starting with 27d67f3abd597ec5646bf578b9346abc8bfefa31d1d072980bbefcc9b85aebd7 not found: ID does not exist" Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.964249 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"] Jan 26 11:06:48 crc kubenswrapper[5003]: I0126 11:06:48.969908 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-proxy-fdbc998f6-jxc44"] Jan 26 11:06:49 crc kubenswrapper[5003]: I0126 11:06:49.008485 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e618c67-8bf8-4d28-b330-22d0cbd56cb5" path="/var/lib/kubelet/pods/4e618c67-8bf8-4d28-b330-22d0cbd56cb5/volumes" Jan 26 11:06:49 crc kubenswrapper[5003]: I0126 11:06:49.009233 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce2db101-ee1a-401c-97fa-32ac80739e03" path="/var/lib/kubelet/pods/ce2db101-ee1a-401c-97fa-32ac80739e03/volumes" Jan 26 11:07:05 crc kubenswrapper[5003]: I0126 11:07:05.839392 5003 scope.go:117] "RemoveContainer" containerID="0c027d858c4da8f5f47b15b900f9a9f7e93e78053e7a49d474bac09254c88b7b" Jan 26 11:07:18 crc kubenswrapper[5003]: I0126 11:07:18.152466 5003 generic.go:334] "Generic (PLEG): container finished" 
podID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerID="4907090f0c08ce892242dc6da0c571bfff88a65efbea27edf6c9bf1e7a8726e2" exitCode=137 Jan 26 11:07:18 crc kubenswrapper[5003]: I0126 11:07:18.152504 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"d446295a-b150-4b07-a2ea-cf0c1fb28553","Type":"ContainerDied","Data":"4907090f0c08ce892242dc6da0c571bfff88a65efbea27edf6c9bf1e7a8726e2"} Jan 26 11:07:18 crc kubenswrapper[5003]: I0126 11:07:18.153365 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"d446295a-b150-4b07-a2ea-cf0c1fb28553","Type":"ContainerDied","Data":"cfabb113453ca4f73be92a3981601ac100dac3ab00bc4edb6811889fd3aff51f"} Jan 26 11:07:18 crc kubenswrapper[5003]: I0126 11:07:18.153435 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cfabb113453ca4f73be92a3981601ac100dac3ab00bc4edb6811889fd3aff51f" Jan 26 11:07:18 crc kubenswrapper[5003]: I0126 11:07:18.198691 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:07:18 crc kubenswrapper[5003]: I0126 11:07:18.339599 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"d446295a-b150-4b07-a2ea-cf0c1fb28553\" (UID: \"d446295a-b150-4b07-a2ea-cf0c1fb28553\") " Jan 26 11:07:18 crc kubenswrapper[5003]: I0126 11:07:18.339725 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d446295a-b150-4b07-a2ea-cf0c1fb28553-combined-ca-bundle\") pod \"d446295a-b150-4b07-a2ea-cf0c1fb28553\" (UID: \"d446295a-b150-4b07-a2ea-cf0c1fb28553\") " Jan 26 11:07:18 crc kubenswrapper[5003]: I0126 11:07:18.339765 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d446295a-b150-4b07-a2ea-cf0c1fb28553-etc-swift\") pod \"d446295a-b150-4b07-a2ea-cf0c1fb28553\" (UID: \"d446295a-b150-4b07-a2ea-cf0c1fb28553\") " Jan 26 11:07:18 crc kubenswrapper[5003]: I0126 11:07:18.339866 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/d446295a-b150-4b07-a2ea-cf0c1fb28553-lock\") pod \"d446295a-b150-4b07-a2ea-cf0c1fb28553\" (UID: \"d446295a-b150-4b07-a2ea-cf0c1fb28553\") " Jan 26 11:07:18 crc kubenswrapper[5003]: I0126 11:07:18.339903 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/d446295a-b150-4b07-a2ea-cf0c1fb28553-cache\") pod \"d446295a-b150-4b07-a2ea-cf0c1fb28553\" (UID: \"d446295a-b150-4b07-a2ea-cf0c1fb28553\") " Jan 26 11:07:18 crc kubenswrapper[5003]: I0126 11:07:18.340164 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cp24x\" (UniqueName: \"kubernetes.io/projected/d446295a-b150-4b07-a2ea-cf0c1fb28553-kube-api-access-cp24x\") pod \"d446295a-b150-4b07-a2ea-cf0c1fb28553\" (UID: \"d446295a-b150-4b07-a2ea-cf0c1fb28553\") " Jan 26 11:07:18 crc kubenswrapper[5003]: I0126 11:07:18.340365 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d446295a-b150-4b07-a2ea-cf0c1fb28553-lock" (OuterVolumeSpecName: "lock") pod "d446295a-b150-4b07-a2ea-cf0c1fb28553" (UID: "d446295a-b150-4b07-a2ea-cf0c1fb28553"). 
InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:07:18 crc kubenswrapper[5003]: I0126 11:07:18.340734 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d446295a-b150-4b07-a2ea-cf0c1fb28553-cache" (OuterVolumeSpecName: "cache") pod "d446295a-b150-4b07-a2ea-cf0c1fb28553" (UID: "d446295a-b150-4b07-a2ea-cf0c1fb28553"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:07:18 crc kubenswrapper[5003]: I0126 11:07:18.340753 5003 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/d446295a-b150-4b07-a2ea-cf0c1fb28553-lock\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:18 crc kubenswrapper[5003]: I0126 11:07:18.345364 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "swift") pod "d446295a-b150-4b07-a2ea-cf0c1fb28553" (UID: "d446295a-b150-4b07-a2ea-cf0c1fb28553"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 26 11:07:18 crc kubenswrapper[5003]: I0126 11:07:18.345810 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d446295a-b150-4b07-a2ea-cf0c1fb28553-kube-api-access-cp24x" (OuterVolumeSpecName: "kube-api-access-cp24x") pod "d446295a-b150-4b07-a2ea-cf0c1fb28553" (UID: "d446295a-b150-4b07-a2ea-cf0c1fb28553"). InnerVolumeSpecName "kube-api-access-cp24x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:07:18 crc kubenswrapper[5003]: I0126 11:07:18.347363 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d446295a-b150-4b07-a2ea-cf0c1fb28553-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "d446295a-b150-4b07-a2ea-cf0c1fb28553" (UID: "d446295a-b150-4b07-a2ea-cf0c1fb28553"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:07:18 crc kubenswrapper[5003]: I0126 11:07:18.441999 5003 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/d446295a-b150-4b07-a2ea-cf0c1fb28553-cache\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:18 crc kubenswrapper[5003]: I0126 11:07:18.442040 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cp24x\" (UniqueName: \"kubernetes.io/projected/d446295a-b150-4b07-a2ea-cf0c1fb28553-kube-api-access-cp24x\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:18 crc kubenswrapper[5003]: I0126 11:07:18.442078 5003 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jan 26 11:07:18 crc kubenswrapper[5003]: I0126 11:07:18.442092 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d446295a-b150-4b07-a2ea-cf0c1fb28553-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:18 crc kubenswrapper[5003]: I0126 11:07:18.460650 5003 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jan 26 11:07:18 crc kubenswrapper[5003]: I0126 11:07:18.543110 5003 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:18 crc kubenswrapper[5003]: I0126 11:07:18.596782 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d446295a-b150-4b07-a2ea-cf0c1fb28553-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d446295a-b150-4b07-a2ea-cf0c1fb28553" (UID: "d446295a-b150-4b07-a2ea-cf0c1fb28553"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:07:18 crc kubenswrapper[5003]: I0126 11:07:18.644201 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d446295a-b150-4b07-a2ea-cf0c1fb28553-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:19 crc kubenswrapper[5003]: I0126 11:07:19.161761 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Jan 26 11:07:19 crc kubenswrapper[5003]: I0126 11:07:19.194266 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Jan 26 11:07:19 crc kubenswrapper[5003]: I0126 11:07:19.206850 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Jan 26 11:07:21 crc kubenswrapper[5003]: I0126 11:07:21.013426 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" path="/var/lib/kubelet/pods/d446295a-b150-4b07-a2ea-cf0c1fb28553/volumes" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.013422 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/barbican-db-sync-dhfpd"] Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.014082 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/barbican-db-sync-dhfpd"] Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.063593 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/barbican-api-789587dbb8-kcmjx"] Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.063808 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/barbican-api-789587dbb8-kcmjx" podUID="8e9f91e0-3cda-46fd-9034-08b41bf5f546" containerName="barbican-api-log" containerID="cri-o://1e7a531cd0db4df67ad0a64eb22cf87ad43ecae131e25f253629eb31adfd2d28" gracePeriod=30 Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.064126 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/barbican-api-789587dbb8-kcmjx" podUID="8e9f91e0-3cda-46fd-9034-08b41bf5f546" containerName="barbican-api" containerID="cri-o://c02da643fd61ea25a72c9b0890782bc77a961e0b80a41bb2a96e4b5dedf763dd" gracePeriod=30 Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.076839 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/barbicancbfe-account-delete-9rhg6"] Jan 26 11:07:25 crc kubenswrapper[5003]: E0126 11:07:25.077370 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e618c67-8bf8-4d28-b330-22d0cbd56cb5" containerName="proxy-server" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077384 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e618c67-8bf8-4d28-b330-22d0cbd56cb5" containerName="proxy-server" Jan 26 11:07:25 crc kubenswrapper[5003]: E0126 11:07:25.077399 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="account-auditor" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077406 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="account-auditor" Jan 26 11:07:25 crc kubenswrapper[5003]: E0126 11:07:25.077415 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="container-updater" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077423 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="container-updater" Jan 26 11:07:25 crc kubenswrapper[5003]: E0126 11:07:25.077432 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="object-updater" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077438 5003 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="object-updater" Jan 26 11:07:25 crc kubenswrapper[5003]: E0126 11:07:25.077444 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="rsync" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077449 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="rsync" Jan 26 11:07:25 crc kubenswrapper[5003]: E0126 11:07:25.077461 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="swift-recon-cron" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077467 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="swift-recon-cron" Jan 26 11:07:25 crc kubenswrapper[5003]: E0126 11:07:25.077476 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="container-server" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077481 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="container-server" Jan 26 11:07:25 crc kubenswrapper[5003]: E0126 11:07:25.077494 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="object-server" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077522 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="object-server" Jan 26 11:07:25 crc kubenswrapper[5003]: E0126 11:07:25.077533 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="container-auditor" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077539 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="container-auditor" Jan 26 11:07:25 crc kubenswrapper[5003]: E0126 11:07:25.077549 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e618c67-8bf8-4d28-b330-22d0cbd56cb5" containerName="proxy-httpd" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077556 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e618c67-8bf8-4d28-b330-22d0cbd56cb5" containerName="proxy-httpd" Jan 26 11:07:25 crc kubenswrapper[5003]: E0126 11:07:25.077564 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="container-replicator" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077570 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="container-replicator" Jan 26 11:07:25 crc kubenswrapper[5003]: E0126 11:07:25.077579 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce2db101-ee1a-401c-97fa-32ac80739e03" containerName="swift-ring-rebalance" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077584 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce2db101-ee1a-401c-97fa-32ac80739e03" containerName="swift-ring-rebalance" Jan 26 11:07:25 crc kubenswrapper[5003]: E0126 11:07:25.077593 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="account-reaper" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077598 5003 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="account-reaper" Jan 26 11:07:25 crc kubenswrapper[5003]: E0126 11:07:25.077608 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="object-replicator" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077614 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="object-replicator" Jan 26 11:07:25 crc kubenswrapper[5003]: E0126 11:07:25.077622 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="object-auditor" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077628 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="object-auditor" Jan 26 11:07:25 crc kubenswrapper[5003]: E0126 11:07:25.077637 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="object-expirer" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077644 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="object-expirer" Jan 26 11:07:25 crc kubenswrapper[5003]: E0126 11:07:25.077656 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="account-replicator" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077661 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="account-replicator" Jan 26 11:07:25 crc kubenswrapper[5003]: E0126 11:07:25.077670 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="account-server" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077676 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="account-server" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077785 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="container-server" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077792 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="account-auditor" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077805 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="container-replicator" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077813 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="account-server" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077820 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="swift-recon-cron" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077829 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="container-updater" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077838 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="account-reaper" Jan 26 11:07:25 crc 
kubenswrapper[5003]: I0126 11:07:25.077847 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="object-replicator" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077854 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="rsync" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077861 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e618c67-8bf8-4d28-b330-22d0cbd56cb5" containerName="proxy-server" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077871 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="object-auditor" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077878 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="account-replicator" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077884 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="object-expirer" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077893 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="object-updater" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077899 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e618c67-8bf8-4d28-b330-22d0cbd56cb5" containerName="proxy-httpd" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077909 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="object-server" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077915 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d446295a-b150-4b07-a2ea-cf0c1fb28553" containerName="container-auditor" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.077922 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce2db101-ee1a-401c-97fa-32ac80739e03" containerName="swift-ring-rebalance" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.078404 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/barbicancbfe-account-delete-9rhg6" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.096686 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/barbican-keystone-listener-66f485f88-d4z77"] Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.096928 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/barbican-keystone-listener-66f485f88-d4z77" podUID="3ca7900c-1191-4d34-a44f-29fd6d510d90" containerName="barbican-keystone-listener-log" containerID="cri-o://190f5c4c6ff888fedb8a854b0496e160e2711922c5c7616b23b9a77f8394fd8c" gracePeriod=30 Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.097446 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/barbican-keystone-listener-66f485f88-d4z77" podUID="3ca7900c-1191-4d34-a44f-29fd6d510d90" containerName="barbican-keystone-listener" containerID="cri-o://cd88fd7d9ca42a0ce1ea33bc0c29660606bc8fe81c56254a1e46b0661c3056a6" gracePeriod=30 Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.102724 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/barbican-worker-85d88cd875-jg89w"] Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.102992 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/barbican-worker-85d88cd875-jg89w" podUID="0aac059e-645d-4967-838a-e51e27aad2ac" containerName="barbican-worker-log" containerID="cri-o://e7f0a55bf957c5db86d64021a11b76095732a05c01f1b8235c887e376fe03a17" gracePeriod=30 Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.103116 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/barbican-worker-85d88cd875-jg89w" podUID="0aac059e-645d-4967-838a-e51e27aad2ac" containerName="barbican-worker" containerID="cri-o://b033a76c17ed034389897b4015037094a75a7297f853d9f4e782227a6f0a5e17" gracePeriod=30 Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.111821 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbicancbfe-account-delete-9rhg6"] Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.236467 5003 generic.go:334] "Generic (PLEG): container finished" podID="8e9f91e0-3cda-46fd-9034-08b41bf5f546" containerID="1e7a531cd0db4df67ad0a64eb22cf87ad43ecae131e25f253629eb31adfd2d28" exitCode=143 Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.236561 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-api-789587dbb8-kcmjx" event={"ID":"8e9f91e0-3cda-46fd-9034-08b41bf5f546","Type":"ContainerDied","Data":"1e7a531cd0db4df67ad0a64eb22cf87ad43ecae131e25f253629eb31adfd2d28"} Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.238514 5003 generic.go:334] "Generic (PLEG): container finished" podID="3ca7900c-1191-4d34-a44f-29fd6d510d90" containerID="190f5c4c6ff888fedb8a854b0496e160e2711922c5c7616b23b9a77f8394fd8c" exitCode=143 Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.238566 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-keystone-listener-66f485f88-d4z77" event={"ID":"3ca7900c-1191-4d34-a44f-29fd6d510d90","Type":"ContainerDied","Data":"190f5c4c6ff888fedb8a854b0496e160e2711922c5c7616b23b9a77f8394fd8c"} Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.240312 5003 generic.go:334] "Generic (PLEG): container finished" podID="0aac059e-645d-4967-838a-e51e27aad2ac" 
containerID="e7f0a55bf957c5db86d64021a11b76095732a05c01f1b8235c887e376fe03a17" exitCode=143 Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.240356 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-worker-85d88cd875-jg89w" event={"ID":"0aac059e-645d-4967-838a-e51e27aad2ac","Type":"ContainerDied","Data":"e7f0a55bf957c5db86d64021a11b76095732a05c01f1b8235c887e376fe03a17"} Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.256452 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hvpcn\" (UniqueName: \"kubernetes.io/projected/e003e40c-8a7e-434a-b20a-1ac895a8d682-kube-api-access-hvpcn\") pod \"barbicancbfe-account-delete-9rhg6\" (UID: \"e003e40c-8a7e-434a-b20a-1ac895a8d682\") " pod="swift-kuttl-tests/barbicancbfe-account-delete-9rhg6" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.256603 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e003e40c-8a7e-434a-b20a-1ac895a8d682-operator-scripts\") pod \"barbicancbfe-account-delete-9rhg6\" (UID: \"e003e40c-8a7e-434a-b20a-1ac895a8d682\") " pod="swift-kuttl-tests/barbicancbfe-account-delete-9rhg6" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.357386 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e003e40c-8a7e-434a-b20a-1ac895a8d682-operator-scripts\") pod \"barbicancbfe-account-delete-9rhg6\" (UID: \"e003e40c-8a7e-434a-b20a-1ac895a8d682\") " pod="swift-kuttl-tests/barbicancbfe-account-delete-9rhg6" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.357467 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hvpcn\" (UniqueName: \"kubernetes.io/projected/e003e40c-8a7e-434a-b20a-1ac895a8d682-kube-api-access-hvpcn\") pod \"barbicancbfe-account-delete-9rhg6\" (UID: \"e003e40c-8a7e-434a-b20a-1ac895a8d682\") " pod="swift-kuttl-tests/barbicancbfe-account-delete-9rhg6" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.358235 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e003e40c-8a7e-434a-b20a-1ac895a8d682-operator-scripts\") pod \"barbicancbfe-account-delete-9rhg6\" (UID: \"e003e40c-8a7e-434a-b20a-1ac895a8d682\") " pod="swift-kuttl-tests/barbicancbfe-account-delete-9rhg6" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.375831 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hvpcn\" (UniqueName: \"kubernetes.io/projected/e003e40c-8a7e-434a-b20a-1ac895a8d682-kube-api-access-hvpcn\") pod \"barbicancbfe-account-delete-9rhg6\" (UID: \"e003e40c-8a7e-434a-b20a-1ac895a8d682\") " pod="swift-kuttl-tests/barbicancbfe-account-delete-9rhg6" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.412831 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/barbicancbfe-account-delete-9rhg6" Jan 26 11:07:25 crc kubenswrapper[5003]: I0126 11:07:25.827457 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbicancbfe-account-delete-9rhg6"] Jan 26 11:07:25 crc kubenswrapper[5003]: W0126 11:07:25.834305 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode003e40c_8a7e_434a_b20a_1ac895a8d682.slice/crio-ffcdacd84e7313c601b98bad2cfc189ad23d0613cae0215e704ea687bb992470 WatchSource:0}: Error finding container ffcdacd84e7313c601b98bad2cfc189ad23d0613cae0215e704ea687bb992470: Status 404 returned error can't find the container with id ffcdacd84e7313c601b98bad2cfc189ad23d0613cae0215e704ea687bb992470 Jan 26 11:07:26 crc kubenswrapper[5003]: I0126 11:07:26.257270 5003 generic.go:334] "Generic (PLEG): container finished" podID="e003e40c-8a7e-434a-b20a-1ac895a8d682" containerID="f986d3cb7935c14181483f8da787cf796fab5b4480960ed3ae2a7eaa7eef5fb0" exitCode=0 Jan 26 11:07:26 crc kubenswrapper[5003]: I0126 11:07:26.257416 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbicancbfe-account-delete-9rhg6" event={"ID":"e003e40c-8a7e-434a-b20a-1ac895a8d682","Type":"ContainerDied","Data":"f986d3cb7935c14181483f8da787cf796fab5b4480960ed3ae2a7eaa7eef5fb0"} Jan 26 11:07:26 crc kubenswrapper[5003]: I0126 11:07:26.257609 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbicancbfe-account-delete-9rhg6" event={"ID":"e003e40c-8a7e-434a-b20a-1ac895a8d682","Type":"ContainerStarted","Data":"ffcdacd84e7313c601b98bad2cfc189ad23d0613cae0215e704ea687bb992470"} Jan 26 11:07:26 crc kubenswrapper[5003]: I0126 11:07:26.391209 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/keystone-f9445869f-dzj8v"] Jan 26 11:07:26 crc kubenswrapper[5003]: I0126 11:07:26.391515 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/keystone-f9445869f-dzj8v" podUID="93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c" containerName="keystone-api" containerID="cri-o://6a0f55184c94033a37ba26dd8dcfbbdf62fb8034476351b07a7e2449325acb9a" gracePeriod=30 Jan 26 11:07:26 crc kubenswrapper[5003]: I0126 11:07:26.406736 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/keystone-bootstrap-vmrv9"] Jan 26 11:07:26 crc kubenswrapper[5003]: I0126 11:07:26.412546 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/keystone-db-sync-r2g92"] Jan 26 11:07:26 crc kubenswrapper[5003]: I0126 11:07:26.425197 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/keystone-db-sync-r2g92"] Jan 26 11:07:26 crc kubenswrapper[5003]: I0126 11:07:26.434589 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/keystone-bootstrap-vmrv9"] Jan 26 11:07:26 crc kubenswrapper[5003]: I0126 11:07:26.441989 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/keystone-cron-29490421-k962l"] Jan 26 11:07:26 crc kubenswrapper[5003]: I0126 11:07:26.448355 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/keystone-cron-29490421-k962l"] Jan 26 11:07:26 crc kubenswrapper[5003]: I0126 11:07:26.453118 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/keystone2760-account-delete-52t5q"] Jan 26 11:07:26 crc kubenswrapper[5003]: I0126 11:07:26.454313 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/keystone2760-account-delete-52t5q"
Jan 26 11:07:26 crc kubenswrapper[5003]: I0126 11:07:26.459361 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone2760-account-delete-52t5q"]
Jan 26 11:07:26 crc kubenswrapper[5003]: I0126 11:07:26.576455 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/afd987da-215e-4edc-940c-1529b3531bf1-operator-scripts\") pod \"keystone2760-account-delete-52t5q\" (UID: \"afd987da-215e-4edc-940c-1529b3531bf1\") " pod="swift-kuttl-tests/keystone2760-account-delete-52t5q"
Jan 26 11:07:26 crc kubenswrapper[5003]: I0126 11:07:26.576703 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rk5zv\" (UniqueName: \"kubernetes.io/projected/afd987da-215e-4edc-940c-1529b3531bf1-kube-api-access-rk5zv\") pod \"keystone2760-account-delete-52t5q\" (UID: \"afd987da-215e-4edc-940c-1529b3531bf1\") " pod="swift-kuttl-tests/keystone2760-account-delete-52t5q"
Jan 26 11:07:26 crc kubenswrapper[5003]: I0126 11:07:26.678666 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/afd987da-215e-4edc-940c-1529b3531bf1-operator-scripts\") pod \"keystone2760-account-delete-52t5q\" (UID: \"afd987da-215e-4edc-940c-1529b3531bf1\") " pod="swift-kuttl-tests/keystone2760-account-delete-52t5q"
Jan 26 11:07:26 crc kubenswrapper[5003]: I0126 11:07:26.678769 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rk5zv\" (UniqueName: \"kubernetes.io/projected/afd987da-215e-4edc-940c-1529b3531bf1-kube-api-access-rk5zv\") pod \"keystone2760-account-delete-52t5q\" (UID: \"afd987da-215e-4edc-940c-1529b3531bf1\") " pod="swift-kuttl-tests/keystone2760-account-delete-52t5q"
Jan 26 11:07:26 crc kubenswrapper[5003]: I0126 11:07:26.679660 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/afd987da-215e-4edc-940c-1529b3531bf1-operator-scripts\") pod \"keystone2760-account-delete-52t5q\" (UID: \"afd987da-215e-4edc-940c-1529b3531bf1\") " pod="swift-kuttl-tests/keystone2760-account-delete-52t5q"
Jan 26 11:07:26 crc kubenswrapper[5003]: I0126 11:07:26.703327 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rk5zv\" (UniqueName: \"kubernetes.io/projected/afd987da-215e-4edc-940c-1529b3531bf1-kube-api-access-rk5zv\") pod \"keystone2760-account-delete-52t5q\" (UID: \"afd987da-215e-4edc-940c-1529b3531bf1\") " pod="swift-kuttl-tests/keystone2760-account-delete-52t5q"
Jan 26 11:07:26 crc kubenswrapper[5003]: I0126 11:07:26.774315 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone2760-account-delete-52t5q"
Jan 26 11:07:27 crc kubenswrapper[5003]: W0126 11:07:27.015445 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podafd987da_215e_4edc_940c_1529b3531bf1.slice/crio-b146ae51b1a3006f16f8a942ad82d449e9e3444f44d16d727dd0da9b25beea9b WatchSource:0}: Error finding container b146ae51b1a3006f16f8a942ad82d449e9e3444f44d16d727dd0da9b25beea9b: Status 404 returned error can't find the container with id b146ae51b1a3006f16f8a942ad82d449e9e3444f44d16d727dd0da9b25beea9b
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.030242 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39e8f294-ba0f-4524-914c-501145d935b5" path="/var/lib/kubelet/pods/39e8f294-ba0f-4524-914c-501145d935b5/volumes"
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.031932 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f577648-3365-45a7-99d9-676747c83c31" path="/var/lib/kubelet/pods/3f577648-3365-45a7-99d9-676747c83c31/volumes"
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.032606 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e14a5862-4c4d-4a12-b110-03285b32d28c" path="/var/lib/kubelet/pods/e14a5862-4c4d-4a12-b110-03285b32d28c/volumes"
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.033211 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e90022bf-9ff0-432e-92ea-8059ae78aada" path="/var/lib/kubelet/pods/e90022bf-9ff0-432e-92ea-8059ae78aada/volumes"
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.033790 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone2760-account-delete-52t5q"]
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.076590 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/root-account-create-update-x8p2x"]
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.098903 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/root-account-create-update-x8p2x"]
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.116555 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/openstack-galera-1"]
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.122536 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/openstack-galera-0"]
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.133219 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/openstack-galera-2"]
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.269921 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone2760-account-delete-52t5q" event={"ID":"afd987da-215e-4edc-940c-1529b3531bf1","Type":"ContainerStarted","Data":"d63d633fd5857b51aa6161ab6b3094bda0ede0e4f9ede3bcf3363f7df4090c1e"}
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.269974 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone2760-account-delete-52t5q" event={"ID":"afd987da-215e-4edc-940c-1529b3531bf1","Type":"ContainerStarted","Data":"b146ae51b1a3006f16f8a942ad82d449e9e3444f44d16d727dd0da9b25beea9b"}
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.270620 5003 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="swift-kuttl-tests/keystone2760-account-delete-52t5q" secret="" err="secret \"galera-openstack-dockercfg-kkmfn\" not found"
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.287271 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/keystone2760-account-delete-52t5q" podStartSLOduration=1.287249131 podStartE2EDuration="1.287249131s" podCreationTimestamp="2026-01-26 11:07:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 11:07:27.286341775 +0000 UTC m=+1462.827567336" watchObservedRunningTime="2026-01-26 11:07:27.287249131 +0000 UTC m=+1462.828474692"
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.340305 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/openstack-galera-2" podUID="bd511f4f-c18a-4f7c-8fb9-1d760a3039ac" containerName="galera" containerID="cri-o://e1648b06fce8b827b74330e6ce165933a3bd587e4a42f925c774a3562cbe625f" gracePeriod=30
Jan 26 11:07:27 crc kubenswrapper[5003]: E0126 11:07:27.393031 5003 configmap.go:193] Couldn't get configMap swift-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found
Jan 26 11:07:27 crc kubenswrapper[5003]: E0126 11:07:27.393115 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/afd987da-215e-4edc-940c-1529b3531bf1-operator-scripts podName:afd987da-215e-4edc-940c-1529b3531bf1 nodeName:}" failed. No retries permitted until 2026-01-26 11:07:27.893094706 +0000 UTC m=+1463.434320267 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/afd987da-215e-4edc-940c-1529b3531bf1-operator-scripts") pod "keystone2760-account-delete-52t5q" (UID: "afd987da-215e-4edc-940c-1529b3531bf1") : configmap "openstack-scripts" not found
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.571002 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbicancbfe-account-delete-9rhg6"
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.685789 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-worker-85d88cd875-jg89w"
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.696805 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hvpcn\" (UniqueName: \"kubernetes.io/projected/e003e40c-8a7e-434a-b20a-1ac895a8d682-kube-api-access-hvpcn\") pod \"e003e40c-8a7e-434a-b20a-1ac895a8d682\" (UID: \"e003e40c-8a7e-434a-b20a-1ac895a8d682\") "
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.696931 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e003e40c-8a7e-434a-b20a-1ac895a8d682-operator-scripts\") pod \"e003e40c-8a7e-434a-b20a-1ac895a8d682\" (UID: \"e003e40c-8a7e-434a-b20a-1ac895a8d682\") "
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.697436 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e003e40c-8a7e-434a-b20a-1ac895a8d682-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e003e40c-8a7e-434a-b20a-1ac895a8d682" (UID: "e003e40c-8a7e-434a-b20a-1ac895a8d682"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.704528 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e003e40c-8a7e-434a-b20a-1ac895a8d682-kube-api-access-hvpcn" (OuterVolumeSpecName: "kube-api-access-hvpcn") pod "e003e40c-8a7e-434a-b20a-1ac895a8d682" (UID: "e003e40c-8a7e-434a-b20a-1ac895a8d682"). InnerVolumeSpecName "kube-api-access-hvpcn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.766176 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/memcached-0"]
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.767630 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/memcached-0" podUID="7210ca64-60f9-4e11-bd2c-6e4905b0b948" containerName="memcached" containerID="cri-o://9d0ea22f8d5cbef2b6ac5abbfaa6920191e268f9920611f04794c7edaa2d3ed9" gracePeriod=30
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.798011 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0aac059e-645d-4967-838a-e51e27aad2ac-logs\") pod \"0aac059e-645d-4967-838a-e51e27aad2ac\" (UID: \"0aac059e-645d-4967-838a-e51e27aad2ac\") "
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.798141 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cmw85\" (UniqueName: \"kubernetes.io/projected/0aac059e-645d-4967-838a-e51e27aad2ac-kube-api-access-cmw85\") pod \"0aac059e-645d-4967-838a-e51e27aad2ac\" (UID: \"0aac059e-645d-4967-838a-e51e27aad2ac\") "
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.798170 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0aac059e-645d-4967-838a-e51e27aad2ac-config-data\") pod \"0aac059e-645d-4967-838a-e51e27aad2ac\" (UID: \"0aac059e-645d-4967-838a-e51e27aad2ac\") "
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.798239 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0aac059e-645d-4967-838a-e51e27aad2ac-config-data-custom\") pod \"0aac059e-645d-4967-838a-e51e27aad2ac\" (UID: \"0aac059e-645d-4967-838a-e51e27aad2ac\") "
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.798555 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hvpcn\" (UniqueName: \"kubernetes.io/projected/e003e40c-8a7e-434a-b20a-1ac895a8d682-kube-api-access-hvpcn\") on node \"crc\" DevicePath \"\""
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.798555 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0aac059e-645d-4967-838a-e51e27aad2ac-logs" (OuterVolumeSpecName: "logs") pod "0aac059e-645d-4967-838a-e51e27aad2ac" (UID: "0aac059e-645d-4967-838a-e51e27aad2ac"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.798574 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e003e40c-8a7e-434a-b20a-1ac895a8d682-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.801270 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0aac059e-645d-4967-838a-e51e27aad2ac-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "0aac059e-645d-4967-838a-e51e27aad2ac" (UID: "0aac059e-645d-4967-838a-e51e27aad2ac"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.802016 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0aac059e-645d-4967-838a-e51e27aad2ac-kube-api-access-cmw85" (OuterVolumeSpecName: "kube-api-access-cmw85") pod "0aac059e-645d-4967-838a-e51e27aad2ac" (UID: "0aac059e-645d-4967-838a-e51e27aad2ac"). InnerVolumeSpecName "kube-api-access-cmw85". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.828312 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0aac059e-645d-4967-838a-e51e27aad2ac-config-data" (OuterVolumeSpecName: "config-data") pod "0aac059e-645d-4967-838a-e51e27aad2ac" (UID: "0aac059e-645d-4967-838a-e51e27aad2ac"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.899910 5003 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0aac059e-645d-4967-838a-e51e27aad2ac-config-data-custom\") on node \"crc\" DevicePath \"\""
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.899941 5003 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0aac059e-645d-4967-838a-e51e27aad2ac-logs\") on node \"crc\" DevicePath \"\""
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.899951 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cmw85\" (UniqueName: \"kubernetes.io/projected/0aac059e-645d-4967-838a-e51e27aad2ac-kube-api-access-cmw85\") on node \"crc\" DevicePath \"\""
Jan 26 11:07:27 crc kubenswrapper[5003]: I0126 11:07:27.899963 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0aac059e-645d-4967-838a-e51e27aad2ac-config-data\") on node \"crc\" DevicePath \"\""
Jan 26 11:07:27 crc kubenswrapper[5003]: E0126 11:07:27.900014 5003 configmap.go:193] Couldn't get configMap swift-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found
Jan 26 11:07:27 crc kubenswrapper[5003]: E0126 11:07:27.900088 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/afd987da-215e-4edc-940c-1529b3531bf1-operator-scripts podName:afd987da-215e-4edc-940c-1529b3531bf1 nodeName:}" failed. No retries permitted until 2026-01-26 11:07:28.900072341 +0000 UTC m=+1464.441297902 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/afd987da-215e-4edc-940c-1529b3531bf1-operator-scripts") pod "keystone2760-account-delete-52t5q" (UID: "afd987da-215e-4edc-940c-1529b3531bf1") : configmap "openstack-scripts" not found
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.166242 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/rabbitmq-server-0"]
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.239562 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/openstack-galera-2"
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.278107 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="swift-kuttl-tests/barbican-api-789587dbb8-kcmjx" podUID="8e9f91e0-3cda-46fd-9034-08b41bf5f546" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.96:9311/healthcheck\": read tcp 10.217.0.2:36324->10.217.0.96:9311: read: connection reset by peer"
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.278131 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="swift-kuttl-tests/barbican-api-789587dbb8-kcmjx" podUID="8e9f91e0-3cda-46fd-9034-08b41bf5f546" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.96:9311/healthcheck\": read tcp 10.217.0.2:36330->10.217.0.96:9311: read: connection reset by peer"
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.298221 5003 generic.go:334] "Generic (PLEG): container finished" podID="bd511f4f-c18a-4f7c-8fb9-1d760a3039ac" containerID="e1648b06fce8b827b74330e6ce165933a3bd587e4a42f925c774a3562cbe625f" exitCode=0
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.298315 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-2" event={"ID":"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac","Type":"ContainerDied","Data":"e1648b06fce8b827b74330e6ce165933a3bd587e4a42f925c774a3562cbe625f"}
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.298331 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/openstack-galera-2"
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.298348 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-2" event={"ID":"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac","Type":"ContainerDied","Data":"d45a1ad40bdd7306108d530ec90168a01807bd506e2822c4c99f7d69f0882793"}
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.298382 5003 scope.go:117] "RemoveContainer" containerID="e1648b06fce8b827b74330e6ce165933a3bd587e4a42f925c774a3562cbe625f"
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.301866 5003 generic.go:334] "Generic (PLEG): container finished" podID="0aac059e-645d-4967-838a-e51e27aad2ac" containerID="b033a76c17ed034389897b4015037094a75a7297f853d9f4e782227a6f0a5e17" exitCode=0
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.301994 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-worker-85d88cd875-jg89w"
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.301994 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-worker-85d88cd875-jg89w" event={"ID":"0aac059e-645d-4967-838a-e51e27aad2ac","Type":"ContainerDied","Data":"b033a76c17ed034389897b4015037094a75a7297f853d9f4e782227a6f0a5e17"}
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.302052 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-worker-85d88cd875-jg89w" event={"ID":"0aac059e-645d-4967-838a-e51e27aad2ac","Type":"ContainerDied","Data":"14f3d31510cae86d510b49ef90686c64210c71b197759f7e9f25b234c0b1314f"}
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.304446 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac\" (UID: \"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac\") "
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.304549 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/bd511f4f-c18a-4f7c-8fb9-1d760a3039ac-config-data-generated\") pod \"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac\" (UID: \"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac\") "
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.304626 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bd511f4f-c18a-4f7c-8fb9-1d760a3039ac-operator-scripts\") pod \"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac\" (UID: \"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac\") "
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.304755 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbicancbfe-account-delete-9rhg6" event={"ID":"e003e40c-8a7e-434a-b20a-1ac895a8d682","Type":"ContainerDied","Data":"ffcdacd84e7313c601b98bad2cfc189ad23d0613cae0215e704ea687bb992470"}
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.304842 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ffcdacd84e7313c601b98bad2cfc189ad23d0613cae0215e704ea687bb992470"
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.304960 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbicancbfe-account-delete-9rhg6"
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.305384 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd511f4f-c18a-4f7c-8fb9-1d760a3039ac-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "bd511f4f-c18a-4f7c-8fb9-1d760a3039ac" (UID: "bd511f4f-c18a-4f7c-8fb9-1d760a3039ac"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.305938 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd511f4f-c18a-4f7c-8fb9-1d760a3039ac-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bd511f4f-c18a-4f7c-8fb9-1d760a3039ac" (UID: "bd511f4f-c18a-4f7c-8fb9-1d760a3039ac"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.306001 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4khmx\" (UniqueName: \"kubernetes.io/projected/bd511f4f-c18a-4f7c-8fb9-1d760a3039ac-kube-api-access-4khmx\") pod \"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac\" (UID: \"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac\") "
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.306029 5003 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="swift-kuttl-tests/keystone2760-account-delete-52t5q" secret="" err="secret \"galera-openstack-dockercfg-kkmfn\" not found"
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.306795 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd511f4f-c18a-4f7c-8fb9-1d760a3039ac-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "bd511f4f-c18a-4f7c-8fb9-1d760a3039ac" (UID: "bd511f4f-c18a-4f7c-8fb9-1d760a3039ac"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.306042 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/bd511f4f-c18a-4f7c-8fb9-1d760a3039ac-config-data-default\") pod \"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac\" (UID: \"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac\") "
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.306921 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/bd511f4f-c18a-4f7c-8fb9-1d760a3039ac-kolla-config\") pod \"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac\" (UID: \"bd511f4f-c18a-4f7c-8fb9-1d760a3039ac\") "
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.307667 5003 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/bd511f4f-c18a-4f7c-8fb9-1d760a3039ac-config-data-generated\") on node \"crc\" DevicePath \"\""
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.307696 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bd511f4f-c18a-4f7c-8fb9-1d760a3039ac-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.307710 5003 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/bd511f4f-c18a-4f7c-8fb9-1d760a3039ac-config-data-default\") on node \"crc\" DevicePath \"\""
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.308314 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd511f4f-c18a-4f7c-8fb9-1d760a3039ac-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "bd511f4f-c18a-4f7c-8fb9-1d760a3039ac" (UID: "bd511f4f-c18a-4f7c-8fb9-1d760a3039ac"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.311829 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd511f4f-c18a-4f7c-8fb9-1d760a3039ac-kube-api-access-4khmx" (OuterVolumeSpecName: "kube-api-access-4khmx") pod "bd511f4f-c18a-4f7c-8fb9-1d760a3039ac" (UID: "bd511f4f-c18a-4f7c-8fb9-1d760a3039ac"). InnerVolumeSpecName "kube-api-access-4khmx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.325810 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "mysql-db") pod "bd511f4f-c18a-4f7c-8fb9-1d760a3039ac" (UID: "bd511f4f-c18a-4f7c-8fb9-1d760a3039ac"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.386243 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/barbican-worker-85d88cd875-jg89w"]
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.388381 5003 scope.go:117] "RemoveContainer" containerID="16abeed0f2f6ba7519e44cf47f900612abcceb0e00e58ec45ce8510f38a266fd"
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.392340 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/barbican-worker-85d88cd875-jg89w"]
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.409020 5003 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" "
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.409060 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4khmx\" (UniqueName: \"kubernetes.io/projected/bd511f4f-c18a-4f7c-8fb9-1d760a3039ac-kube-api-access-4khmx\") on node \"crc\" DevicePath \"\""
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.409075 5003 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/bd511f4f-c18a-4f7c-8fb9-1d760a3039ac-kolla-config\") on node \"crc\" DevicePath \"\""
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.459142 5003 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc"
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.461782 5003 scope.go:117] "RemoveContainer" containerID="e1648b06fce8b827b74330e6ce165933a3bd587e4a42f925c774a3562cbe625f"
Jan 26 11:07:28 crc kubenswrapper[5003]: E0126 11:07:28.463508 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1648b06fce8b827b74330e6ce165933a3bd587e4a42f925c774a3562cbe625f\": container with ID starting with e1648b06fce8b827b74330e6ce165933a3bd587e4a42f925c774a3562cbe625f not found: ID does not exist" containerID="e1648b06fce8b827b74330e6ce165933a3bd587e4a42f925c774a3562cbe625f"
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.463569 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1648b06fce8b827b74330e6ce165933a3bd587e4a42f925c774a3562cbe625f"} err="failed to get container status \"e1648b06fce8b827b74330e6ce165933a3bd587e4a42f925c774a3562cbe625f\": rpc error: code = NotFound desc = could not find container \"e1648b06fce8b827b74330e6ce165933a3bd587e4a42f925c774a3562cbe625f\": container with ID starting with e1648b06fce8b827b74330e6ce165933a3bd587e4a42f925c774a3562cbe625f not found: ID does not exist"
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.463601 5003 scope.go:117] "RemoveContainer" containerID="16abeed0f2f6ba7519e44cf47f900612abcceb0e00e58ec45ce8510f38a266fd"
Jan 26 11:07:28 crc kubenswrapper[5003]: E0126 11:07:28.465397 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"16abeed0f2f6ba7519e44cf47f900612abcceb0e00e58ec45ce8510f38a266fd\": container with ID starting with 16abeed0f2f6ba7519e44cf47f900612abcceb0e00e58ec45ce8510f38a266fd not found: ID does not exist" containerID="16abeed0f2f6ba7519e44cf47f900612abcceb0e00e58ec45ce8510f38a266fd"
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.465435 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16abeed0f2f6ba7519e44cf47f900612abcceb0e00e58ec45ce8510f38a266fd"} err="failed to get container status \"16abeed0f2f6ba7519e44cf47f900612abcceb0e00e58ec45ce8510f38a266fd\": rpc error: code = NotFound desc = could not find container \"16abeed0f2f6ba7519e44cf47f900612abcceb0e00e58ec45ce8510f38a266fd\": container with ID starting with 16abeed0f2f6ba7519e44cf47f900612abcceb0e00e58ec45ce8510f38a266fd not found: ID does not exist"
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.465451 5003 scope.go:117] "RemoveContainer" containerID="b033a76c17ed034389897b4015037094a75a7297f853d9f4e782227a6f0a5e17"
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.511165 5003 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\""
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.546252 5003 scope.go:117] "RemoveContainer" containerID="e7f0a55bf957c5db86d64021a11b76095732a05c01f1b8235c887e376fe03a17"
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.594255 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-api-789587dbb8-kcmjx"
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.615541 5003 scope.go:117] "RemoveContainer" containerID="b033a76c17ed034389897b4015037094a75a7297f853d9f4e782227a6f0a5e17"
Jan 26 11:07:28 crc kubenswrapper[5003]: E0126 11:07:28.616797 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b033a76c17ed034389897b4015037094a75a7297f853d9f4e782227a6f0a5e17\": container with ID starting with b033a76c17ed034389897b4015037094a75a7297f853d9f4e782227a6f0a5e17 not found: ID does not exist" containerID="b033a76c17ed034389897b4015037094a75a7297f853d9f4e782227a6f0a5e17"
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.616843 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b033a76c17ed034389897b4015037094a75a7297f853d9f4e782227a6f0a5e17"} err="failed to get container status \"b033a76c17ed034389897b4015037094a75a7297f853d9f4e782227a6f0a5e17\": rpc error: code = NotFound desc = could not find container \"b033a76c17ed034389897b4015037094a75a7297f853d9f4e782227a6f0a5e17\": container with ID starting with b033a76c17ed034389897b4015037094a75a7297f853d9f4e782227a6f0a5e17 not found: ID does not exist"
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.616866 5003 scope.go:117] "RemoveContainer" containerID="e7f0a55bf957c5db86d64021a11b76095732a05c01f1b8235c887e376fe03a17"
Jan 26 11:07:28 crc kubenswrapper[5003]: E0126 11:07:28.629482 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7f0a55bf957c5db86d64021a11b76095732a05c01f1b8235c887e376fe03a17\": container with ID starting with e7f0a55bf957c5db86d64021a11b76095732a05c01f1b8235c887e376fe03a17 not found: ID does not exist" containerID="e7f0a55bf957c5db86d64021a11b76095732a05c01f1b8235c887e376fe03a17"
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.629540 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7f0a55bf957c5db86d64021a11b76095732a05c01f1b8235c887e376fe03a17"} err="failed to get container status \"e7f0a55bf957c5db86d64021a11b76095732a05c01f1b8235c887e376fe03a17\": rpc error: code = NotFound desc = could not find container \"e7f0a55bf957c5db86d64021a11b76095732a05c01f1b8235c887e376fe03a17\": container with ID starting with e7f0a55bf957c5db86d64021a11b76095732a05c01f1b8235c887e376fe03a17 not found: ID does not exist"
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.654548 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/rabbitmq-server-0"]
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.659525 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/openstack-galera-2"]
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.664143 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/openstack-galera-2"]
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.695546 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/rabbitmq-server-0" podUID="0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2" containerName="rabbitmq" containerID="cri-o://cb64f285d792c261060fd6ddf7d2f6c8d187e1c4445c755d57bb1da01ddfc111" gracePeriod=604800
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.713111 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e9f91e0-3cda-46fd-9034-08b41bf5f546-config-data\") pod \"8e9f91e0-3cda-46fd-9034-08b41bf5f546\" (UID: \"8e9f91e0-3cda-46fd-9034-08b41bf5f546\") "
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.713199 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p9qsr\" (UniqueName: \"kubernetes.io/projected/8e9f91e0-3cda-46fd-9034-08b41bf5f546-kube-api-access-p9qsr\") pod \"8e9f91e0-3cda-46fd-9034-08b41bf5f546\" (UID: \"8e9f91e0-3cda-46fd-9034-08b41bf5f546\") "
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.713263 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8e9f91e0-3cda-46fd-9034-08b41bf5f546-config-data-custom\") pod \"8e9f91e0-3cda-46fd-9034-08b41bf5f546\" (UID: \"8e9f91e0-3cda-46fd-9034-08b41bf5f546\") "
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.713299 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8e9f91e0-3cda-46fd-9034-08b41bf5f546-logs\") pod \"8e9f91e0-3cda-46fd-9034-08b41bf5f546\" (UID: \"8e9f91e0-3cda-46fd-9034-08b41bf5f546\") "
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.714256 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e9f91e0-3cda-46fd-9034-08b41bf5f546-logs" (OuterVolumeSpecName: "logs") pod "8e9f91e0-3cda-46fd-9034-08b41bf5f546" (UID: "8e9f91e0-3cda-46fd-9034-08b41bf5f546"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.719271 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e9f91e0-3cda-46fd-9034-08b41bf5f546-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "8e9f91e0-3cda-46fd-9034-08b41bf5f546" (UID: "8e9f91e0-3cda-46fd-9034-08b41bf5f546"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.722493 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e9f91e0-3cda-46fd-9034-08b41bf5f546-kube-api-access-p9qsr" (OuterVolumeSpecName: "kube-api-access-p9qsr") pod "8e9f91e0-3cda-46fd-9034-08b41bf5f546" (UID: "8e9f91e0-3cda-46fd-9034-08b41bf5f546"). InnerVolumeSpecName "kube-api-access-p9qsr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.748858 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-keystone-listener-66f485f88-d4z77"
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.765509 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e9f91e0-3cda-46fd-9034-08b41bf5f546-config-data" (OuterVolumeSpecName: "config-data") pod "8e9f91e0-3cda-46fd-9034-08b41bf5f546" (UID: "8e9f91e0-3cda-46fd-9034-08b41bf5f546"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.815119 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sq662\" (UniqueName: \"kubernetes.io/projected/3ca7900c-1191-4d34-a44f-29fd6d510d90-kube-api-access-sq662\") pod \"3ca7900c-1191-4d34-a44f-29fd6d510d90\" (UID: \"3ca7900c-1191-4d34-a44f-29fd6d510d90\") "
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.815202 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3ca7900c-1191-4d34-a44f-29fd6d510d90-config-data-custom\") pod \"3ca7900c-1191-4d34-a44f-29fd6d510d90\" (UID: \"3ca7900c-1191-4d34-a44f-29fd6d510d90\") "
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.815269 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ca7900c-1191-4d34-a44f-29fd6d510d90-logs\") pod \"3ca7900c-1191-4d34-a44f-29fd6d510d90\" (UID: \"3ca7900c-1191-4d34-a44f-29fd6d510d90\") "
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.815321 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ca7900c-1191-4d34-a44f-29fd6d510d90-config-data\") pod \"3ca7900c-1191-4d34-a44f-29fd6d510d90\" (UID: \"3ca7900c-1191-4d34-a44f-29fd6d510d90\") "
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.815678 5003 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8e9f91e0-3cda-46fd-9034-08b41bf5f546-config-data-custom\") on node \"crc\" DevicePath \"\""
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.815703 5003 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8e9f91e0-3cda-46fd-9034-08b41bf5f546-logs\") on node \"crc\" DevicePath \"\""
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.815717 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e9f91e0-3cda-46fd-9034-08b41bf5f546-config-data\") on node \"crc\" DevicePath \"\""
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.815710 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ca7900c-1191-4d34-a44f-29fd6d510d90-logs" (OuterVolumeSpecName: "logs") pod "3ca7900c-1191-4d34-a44f-29fd6d510d90" (UID: "3ca7900c-1191-4d34-a44f-29fd6d510d90"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.815731 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p9qsr\" (UniqueName: \"kubernetes.io/projected/8e9f91e0-3cda-46fd-9034-08b41bf5f546-kube-api-access-p9qsr\") on node \"crc\" DevicePath \"\""
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.818906 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ca7900c-1191-4d34-a44f-29fd6d510d90-kube-api-access-sq662" (OuterVolumeSpecName: "kube-api-access-sq662") pod "3ca7900c-1191-4d34-a44f-29fd6d510d90" (UID: "3ca7900c-1191-4d34-a44f-29fd6d510d90"). InnerVolumeSpecName "kube-api-access-sq662". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.819345 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ca7900c-1191-4d34-a44f-29fd6d510d90-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "3ca7900c-1191-4d34-a44f-29fd6d510d90" (UID: "3ca7900c-1191-4d34-a44f-29fd6d510d90"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.849491 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ca7900c-1191-4d34-a44f-29fd6d510d90-config-data" (OuterVolumeSpecName: "config-data") pod "3ca7900c-1191-4d34-a44f-29fd6d510d90" (UID: "3ca7900c-1191-4d34-a44f-29fd6d510d90"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.917411 5003 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3ca7900c-1191-4d34-a44f-29fd6d510d90-config-data-custom\") on node \"crc\" DevicePath \"\""
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.917443 5003 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ca7900c-1191-4d34-a44f-29fd6d510d90-logs\") on node \"crc\" DevicePath \"\""
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.917455 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ca7900c-1191-4d34-a44f-29fd6d510d90-config-data\") on node \"crc\" DevicePath \"\""
Jan 26 11:07:28 crc kubenswrapper[5003]: I0126 11:07:28.917465 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sq662\" (UniqueName: \"kubernetes.io/projected/3ca7900c-1191-4d34-a44f-29fd6d510d90-kube-api-access-sq662\") on node \"crc\" DevicePath \"\""
Jan 26 11:07:28 crc kubenswrapper[5003]: E0126 11:07:28.917531 5003 configmap.go:193] Couldn't get configMap swift-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found
Jan 26 11:07:28 crc kubenswrapper[5003]: E0126 11:07:28.917580 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/afd987da-215e-4edc-940c-1529b3531bf1-operator-scripts podName:afd987da-215e-4edc-940c-1529b3531bf1 nodeName:}" failed. No retries permitted until 2026-01-26 11:07:30.917566522 +0000 UTC m=+1466.458792083 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/afd987da-215e-4edc-940c-1529b3531bf1-operator-scripts") pod "keystone2760-account-delete-52t5q" (UID: "afd987da-215e-4edc-940c-1529b3531bf1") : configmap "openstack-scripts" not found
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.017399 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0aac059e-645d-4967-838a-e51e27aad2ac" path="/var/lib/kubelet/pods/0aac059e-645d-4967-838a-e51e27aad2ac/volumes"
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.018116 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd511f4f-c18a-4f7c-8fb9-1d760a3039ac" path="/var/lib/kubelet/pods/bd511f4f-c18a-4f7c-8fb9-1d760a3039ac/volumes"
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.018706 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d959552e-4345-4061-84f9-bbe50cca4b4d" path="/var/lib/kubelet/pods/d959552e-4345-4061-84f9-bbe50cca4b4d/volumes"
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.314055 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/openstack-galera-1" podUID="261ebec9-25ad-4434-bf06-3feeee0f0eff" containerName="galera" containerID="cri-o://cedc05e74c7904ed0bb5572705f2be47c553af9e6652d1bda63282ec2c7dc646" gracePeriod=28
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.317828 5003 generic.go:334] "Generic (PLEG): container finished" podID="7210ca64-60f9-4e11-bd2c-6e4905b0b948" containerID="9d0ea22f8d5cbef2b6ac5abbfaa6920191e268f9920611f04794c7edaa2d3ed9" exitCode=0
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.318222 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/memcached-0" event={"ID":"7210ca64-60f9-4e11-bd2c-6e4905b0b948","Type":"ContainerDied","Data":"9d0ea22f8d5cbef2b6ac5abbfaa6920191e268f9920611f04794c7edaa2d3ed9"}
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.318258 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/memcached-0" event={"ID":"7210ca64-60f9-4e11-bd2c-6e4905b0b948","Type":"ContainerDied","Data":"41f340c59885c338f2aa1019cff82a5b2d20b97ca2b10935fd0ce3c710bd26fb"}
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.318289 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="41f340c59885c338f2aa1019cff82a5b2d20b97ca2b10935fd0ce3c710bd26fb"
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.318647 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/memcached-0"
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.324200 5003 generic.go:334] "Generic (PLEG): container finished" podID="3ca7900c-1191-4d34-a44f-29fd6d510d90" containerID="cd88fd7d9ca42a0ce1ea33bc0c29660606bc8fe81c56254a1e46b0661c3056a6" exitCode=0
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.324272 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-keystone-listener-66f485f88-d4z77" event={"ID":"3ca7900c-1191-4d34-a44f-29fd6d510d90","Type":"ContainerDied","Data":"cd88fd7d9ca42a0ce1ea33bc0c29660606bc8fe81c56254a1e46b0661c3056a6"}
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.324318 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-keystone-listener-66f485f88-d4z77" event={"ID":"3ca7900c-1191-4d34-a44f-29fd6d510d90","Type":"ContainerDied","Data":"80d6d084fd480d033efbc70ed9544ea8111770ff6dfaf400bcf614cd1a9f8be4"}
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.324336 5003 scope.go:117] "RemoveContainer" containerID="cd88fd7d9ca42a0ce1ea33bc0c29660606bc8fe81c56254a1e46b0661c3056a6"
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.324461 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-keystone-listener-66f485f88-d4z77"
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.335141 5003 generic.go:334] "Generic (PLEG): container finished" podID="8e9f91e0-3cda-46fd-9034-08b41bf5f546" containerID="c02da643fd61ea25a72c9b0890782bc77a961e0b80a41bb2a96e4b5dedf763dd" exitCode=0
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.335219 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-api-789587dbb8-kcmjx" event={"ID":"8e9f91e0-3cda-46fd-9034-08b41bf5f546","Type":"ContainerDied","Data":"c02da643fd61ea25a72c9b0890782bc77a961e0b80a41bb2a96e4b5dedf763dd"}
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.335264 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-api-789587dbb8-kcmjx" event={"ID":"8e9f91e0-3cda-46fd-9034-08b41bf5f546","Type":"ContainerDied","Data":"274b8389b6b0dc53a5de34ce8ad0938b2de4028c96ab65e8f080a80e6cafdb2a"}
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.335389 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-api-789587dbb8-kcmjx"
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.359612 5003 scope.go:117] "RemoveContainer" containerID="190f5c4c6ff888fedb8a854b0496e160e2711922c5c7616b23b9a77f8394fd8c"
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.362353 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/barbican-api-789587dbb8-kcmjx"]
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.373477 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/barbican-api-789587dbb8-kcmjx"]
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.380755 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/barbican-keystone-listener-66f485f88-d4z77"]
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.384175 5003 scope.go:117] "RemoveContainer" containerID="cd88fd7d9ca42a0ce1ea33bc0c29660606bc8fe81c56254a1e46b0661c3056a6"
Jan 26 11:07:29 crc kubenswrapper[5003]: E0126 11:07:29.384689 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd88fd7d9ca42a0ce1ea33bc0c29660606bc8fe81c56254a1e46b0661c3056a6\": container with ID starting with cd88fd7d9ca42a0ce1ea33bc0c29660606bc8fe81c56254a1e46b0661c3056a6 not found: ID does not exist" containerID="cd88fd7d9ca42a0ce1ea33bc0c29660606bc8fe81c56254a1e46b0661c3056a6"
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.384728 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd88fd7d9ca42a0ce1ea33bc0c29660606bc8fe81c56254a1e46b0661c3056a6"} err="failed to get container status \"cd88fd7d9ca42a0ce1ea33bc0c29660606bc8fe81c56254a1e46b0661c3056a6\": rpc error: code = NotFound desc = could not find container \"cd88fd7d9ca42a0ce1ea33bc0c29660606bc8fe81c56254a1e46b0661c3056a6\": container with ID starting with cd88fd7d9ca42a0ce1ea33bc0c29660606bc8fe81c56254a1e46b0661c3056a6 not found: ID does not exist"
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.384771 5003 scope.go:117] "RemoveContainer" containerID="190f5c4c6ff888fedb8a854b0496e160e2711922c5c7616b23b9a77f8394fd8c"
Jan 26 11:07:29 crc kubenswrapper[5003]: E0126 11:07:29.385049 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"190f5c4c6ff888fedb8a854b0496e160e2711922c5c7616b23b9a77f8394fd8c\": container with ID starting with 190f5c4c6ff888fedb8a854b0496e160e2711922c5c7616b23b9a77f8394fd8c not found: ID does not exist" containerID="190f5c4c6ff888fedb8a854b0496e160e2711922c5c7616b23b9a77f8394fd8c"
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.385094 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"190f5c4c6ff888fedb8a854b0496e160e2711922c5c7616b23b9a77f8394fd8c"} err="failed to get container status \"190f5c4c6ff888fedb8a854b0496e160e2711922c5c7616b23b9a77f8394fd8c\": rpc error: code = NotFound desc = could not find container \"190f5c4c6ff888fedb8a854b0496e160e2711922c5c7616b23b9a77f8394fd8c\": container with ID starting with 190f5c4c6ff888fedb8a854b0496e160e2711922c5c7616b23b9a77f8394fd8c not found: ID does not exist"
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.385121 5003 scope.go:117] "RemoveContainer" containerID="c02da643fd61ea25a72c9b0890782bc77a961e0b80a41bb2a96e4b5dedf763dd"
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.386668 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/barbican-keystone-listener-66f485f88-d4z77"]
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.403659 5003 scope.go:117] "RemoveContainer" containerID="1e7a531cd0db4df67ad0a64eb22cf87ad43ecae131e25f253629eb31adfd2d28"
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.423966 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7210ca64-60f9-4e11-bd2c-6e4905b0b948-kolla-config\") pod \"7210ca64-60f9-4e11-bd2c-6e4905b0b948\" (UID: \"7210ca64-60f9-4e11-bd2c-6e4905b0b948\") "
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.424156 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z6s5n\" (UniqueName: \"kubernetes.io/projected/7210ca64-60f9-4e11-bd2c-6e4905b0b948-kube-api-access-z6s5n\") pod \"7210ca64-60f9-4e11-bd2c-6e4905b0b948\" (UID: \"7210ca64-60f9-4e11-bd2c-6e4905b0b948\") "
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.424251 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7210ca64-60f9-4e11-bd2c-6e4905b0b948-config-data\") pod \"7210ca64-60f9-4e11-bd2c-6e4905b0b948\" (UID: \"7210ca64-60f9-4e11-bd2c-6e4905b0b948\") "
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.424826 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7210ca64-60f9-4e11-bd2c-6e4905b0b948-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "7210ca64-60f9-4e11-bd2c-6e4905b0b948" (UID: "7210ca64-60f9-4e11-bd2c-6e4905b0b948"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.425535 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7210ca64-60f9-4e11-bd2c-6e4905b0b948-config-data" (OuterVolumeSpecName: "config-data") pod "7210ca64-60f9-4e11-bd2c-6e4905b0b948" (UID: "7210ca64-60f9-4e11-bd2c-6e4905b0b948"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.429249 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7210ca64-60f9-4e11-bd2c-6e4905b0b948-kube-api-access-z6s5n" (OuterVolumeSpecName: "kube-api-access-z6s5n") pod "7210ca64-60f9-4e11-bd2c-6e4905b0b948" (UID: "7210ca64-60f9-4e11-bd2c-6e4905b0b948"). InnerVolumeSpecName "kube-api-access-z6s5n". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.448439 5003 scope.go:117] "RemoveContainer" containerID="c02da643fd61ea25a72c9b0890782bc77a961e0b80a41bb2a96e4b5dedf763dd"
Jan 26 11:07:29 crc kubenswrapper[5003]: E0126 11:07:29.448903 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c02da643fd61ea25a72c9b0890782bc77a961e0b80a41bb2a96e4b5dedf763dd\": container with ID starting with c02da643fd61ea25a72c9b0890782bc77a961e0b80a41bb2a96e4b5dedf763dd not found: ID does not exist" containerID="c02da643fd61ea25a72c9b0890782bc77a961e0b80a41bb2a96e4b5dedf763dd"
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.448966 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c02da643fd61ea25a72c9b0890782bc77a961e0b80a41bb2a96e4b5dedf763dd"} err="failed to get container status \"c02da643fd61ea25a72c9b0890782bc77a961e0b80a41bb2a96e4b5dedf763dd\": rpc error: code = NotFound desc = could not find container \"c02da643fd61ea25a72c9b0890782bc77a961e0b80a41bb2a96e4b5dedf763dd\": container with ID starting with c02da643fd61ea25a72c9b0890782bc77a961e0b80a41bb2a96e4b5dedf763dd not found: ID does not exist"
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.449006 5003 scope.go:117] "RemoveContainer" containerID="1e7a531cd0db4df67ad0a64eb22cf87ad43ecae131e25f253629eb31adfd2d28"
Jan 26 11:07:29 crc kubenswrapper[5003]: E0126 11:07:29.449337 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e7a531cd0db4df67ad0a64eb22cf87ad43ecae131e25f253629eb31adfd2d28\": container with ID starting with 1e7a531cd0db4df67ad0a64eb22cf87ad43ecae131e25f253629eb31adfd2d28 not found: ID does not exist" containerID="1e7a531cd0db4df67ad0a64eb22cf87ad43ecae131e25f253629eb31adfd2d28"
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.449362 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e7a531cd0db4df67ad0a64eb22cf87ad43ecae131e25f253629eb31adfd2d28"} err="failed to get container status \"1e7a531cd0db4df67ad0a64eb22cf87ad43ecae131e25f253629eb31adfd2d28\": rpc error: code = NotFound desc = could not find container \"1e7a531cd0db4df67ad0a64eb22cf87ad43ecae131e25f253629eb31adfd2d28\": container with ID starting with 1e7a531cd0db4df67ad0a64eb22cf87ad43ecae131e25f253629eb31adfd2d28 not found: ID does not exist"
Jan 26 11:07:29 crc kubenswrapper[5003]: E0126 11:07:29.472594 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cedc05e74c7904ed0bb5572705f2be47c553af9e6652d1bda63282ec2c7dc646" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"]
Jan 26 11:07:29 crc kubenswrapper[5003]: E0126 11:07:29.474737 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cedc05e74c7904ed0bb5572705f2be47c553af9e6652d1bda63282ec2c7dc646" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"]
Jan 26 11:07:29 crc kubenswrapper[5003]: E0126 11:07:29.485865 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cedc05e74c7904ed0bb5572705f2be47c553af9e6652d1bda63282ec2c7dc646" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"]
Jan 26 11:07:29 crc kubenswrapper[5003]: E0126 11:07:29.485962 5003 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="swift-kuttl-tests/openstack-galera-1" podUID="261ebec9-25ad-4434-bf06-3feeee0f0eff" containerName="galera"
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.525441 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z6s5n\" (UniqueName: \"kubernetes.io/projected/7210ca64-60f9-4e11-bd2c-6e4905b0b948-kube-api-access-z6s5n\") on node \"crc\" DevicePath \"\""
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.525481 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7210ca64-60f9-4e11-bd2c-6e4905b0b948-config-data\") on node \"crc\" DevicePath \"\""
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.525491 5003 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7210ca64-60f9-4e11-bd2c-6e4905b0b948-kolla-config\") on node \"crc\" DevicePath \"\""
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.548891 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="swift-kuttl-tests/keystone-f9445869f-dzj8v" podUID="93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c" containerName="keystone-api" probeResult="failure" output="Get \"http://10.217.0.85:5000/v3\": read tcp 10.217.0.2:53196->10.217.0.85:5000: read: connection reset by peer"
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.705910 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="swift-kuttl-tests/rabbitmq-server-0" podUID="0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.70:5672: connect: connection refused"
Jan 26 11:07:29 crc kubenswrapper[5003]: I0126 11:07:29.944826 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone-f9445869f-dzj8v"
Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.038736 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c-scripts\") pod \"93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c\" (UID: \"93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c\") "
Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.038862 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c-fernet-keys\") pod \"93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c\" (UID: \"93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c\") "
Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.038919 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c-config-data\") pod \"93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c\" (UID: \"93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c\") "
Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.038947 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gk5zs\" (UniqueName: \"kubernetes.io/projected/93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c-kube-api-access-gk5zs\") pod \"93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c\" (UID: \"93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c\") "
Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.039006 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c-credential-keys\") pod \"93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c\" (UID: \"93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c\") "
Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.044249 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c-scripts" (OuterVolumeSpecName: "scripts") pod "93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c" (UID: "93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.044643 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c-kube-api-access-gk5zs" (OuterVolumeSpecName: "kube-api-access-gk5zs") pod "93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c" (UID: "93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c"). InnerVolumeSpecName "kube-api-access-gk5zs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.045270 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c" (UID: "93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.045604 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c" (UID: "93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.081510 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c-config-data" (OuterVolumeSpecName: "config-data") pod "93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c" (UID: "93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.115057 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/barbican-db-create-9cxv7"]
Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.126959 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/barbican-db-create-9cxv7"]
Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.133520 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/barbicancbfe-account-delete-9rhg6"]
Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.140827 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/barbican-cbfe-account-create-update-dzdjr"]
Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.141897 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c-scripts\") on node \"crc\" DevicePath \"\""
Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.141924 5003 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c-fernet-keys\") on node \"crc\" DevicePath \"\""
Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.141933 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c-config-data\") on node \"crc\" DevicePath \"\""
Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.141942 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gk5zs\" (UniqueName: \"kubernetes.io/projected/93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c-kube-api-access-gk5zs\") on node \"crc\" DevicePath \"\""
Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.141951 5003 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c-credential-keys\") on node \"crc\" DevicePath \"\""
Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.146690 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/barbicancbfe-account-delete-9rhg6"]
Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.151345 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/barbican-cbfe-account-create-update-dzdjr"]
Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.354345 5003 generic.go:334] "Generic (PLEG): container finished" podID="0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2" containerID="cb64f285d792c261060fd6ddf7d2f6c8d187e1c4445c755d57bb1da01ddfc111" exitCode=0
Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.354413 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/rabbitmq-server-0" event={"ID":"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2","Type":"ContainerDied","Data":"cb64f285d792c261060fd6ddf7d2f6c8d187e1c4445c755d57bb1da01ddfc111"}
Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.354470 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/rabbitmq-server-0" event={"ID":"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2","Type":"ContainerDied","Data":"a99edba5fe7bf60f154749b714e4b8f8a52e8cefefdcabd2867fe01b0137ae80"}
Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.354493 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a99edba5fe7bf60f154749b714e4b8f8a52e8cefefdcabd2867fe01b0137ae80"
Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.355541 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/rabbitmq-server-0"
Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.356079 5003 generic.go:334] "Generic (PLEG): container finished" podID="93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c" containerID="6a0f55184c94033a37ba26dd8dcfbbdf62fb8034476351b07a7e2449325acb9a" exitCode=0
Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.356146 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/memcached-0"
Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.356151 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-f9445869f-dzj8v" event={"ID":"93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c","Type":"ContainerDied","Data":"6a0f55184c94033a37ba26dd8dcfbbdf62fb8034476351b07a7e2449325acb9a"}
Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.356211 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-f9445869f-dzj8v" event={"ID":"93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c","Type":"ContainerDied","Data":"a7e7db0eed8633ec3556471a345335d661b24273d9f37cf02b1f4bc34ad5ccd2"}
Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.356241 5003 scope.go:117] "RemoveContainer" containerID="6a0f55184c94033a37ba26dd8dcfbbdf62fb8034476351b07a7e2449325acb9a"
Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.356149 5003 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="swift-kuttl-tests/keystone-f9445869f-dzj8v" Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.382361 5003 scope.go:117] "RemoveContainer" containerID="6a0f55184c94033a37ba26dd8dcfbbdf62fb8034476351b07a7e2449325acb9a" Jan 26 11:07:30 crc kubenswrapper[5003]: E0126 11:07:30.382882 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6a0f55184c94033a37ba26dd8dcfbbdf62fb8034476351b07a7e2449325acb9a\": container with ID starting with 6a0f55184c94033a37ba26dd8dcfbbdf62fb8034476351b07a7e2449325acb9a not found: ID does not exist" containerID="6a0f55184c94033a37ba26dd8dcfbbdf62fb8034476351b07a7e2449325acb9a" Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.383015 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a0f55184c94033a37ba26dd8dcfbbdf62fb8034476351b07a7e2449325acb9a"} err="failed to get container status \"6a0f55184c94033a37ba26dd8dcfbbdf62fb8034476351b07a7e2449325acb9a\": rpc error: code = NotFound desc = could not find container \"6a0f55184c94033a37ba26dd8dcfbbdf62fb8034476351b07a7e2449325acb9a\": container with ID starting with 6a0f55184c94033a37ba26dd8dcfbbdf62fb8034476351b07a7e2449325acb9a not found: ID does not exist" Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.412188 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/memcached-0"] Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.425332 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/memcached-0"] Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.431373 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/keystone-f9445869f-dzj8v"] Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.451574 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-rabbitmq-confd\") pod \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\" (UID: \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") " Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.451699 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-erlang-cookie-secret\") pod \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\" (UID: \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") " Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.451737 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-pod-info\") pod \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\" (UID: \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") " Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.451773 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-rabbitmq-plugins\") pod \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\" (UID: \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") " Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.451998 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e5cf6171-4855-4722-b78a-d87822d3d337\") pod \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\" (UID: 
\"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") " Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.452036 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-plugins-conf\") pod \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\" (UID: \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") " Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.452074 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-rabbitmq-erlang-cookie\") pod \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\" (UID: \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") " Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.452114 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w2x6d\" (UniqueName: \"kubernetes.io/projected/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-kube-api-access-w2x6d\") pod \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\" (UID: \"0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2\") " Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.454496 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2" (UID: "0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.454557 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2" (UID: "0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.454700 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2" (UID: "0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.465841 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/keystone-f9445869f-dzj8v"] Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.466011 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-pod-info" (OuterVolumeSpecName: "pod-info") pod "0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2" (UID: "0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.468380 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e5cf6171-4855-4722-b78a-d87822d3d337" (OuterVolumeSpecName: "persistence") pod "0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2" (UID: "0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2"). InnerVolumeSpecName "pvc-e5cf6171-4855-4722-b78a-d87822d3d337". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.480147 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2" (UID: "0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.483847 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-kube-api-access-w2x6d" (OuterVolumeSpecName: "kube-api-access-w2x6d") pod "0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2" (UID: "0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2"). InnerVolumeSpecName "kube-api-access-w2x6d". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.544492 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2" (UID: "0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.554225 5003 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.554264 5003 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-pod-info\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.554274 5003 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.554333 5003 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-e5cf6171-4855-4722-b78a-d87822d3d337\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e5cf6171-4855-4722-b78a-d87822d3d337\") on node \"crc\" " Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.554348 5003 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.554358 5003 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.554369 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w2x6d\" (UniqueName: \"kubernetes.io/projected/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-kube-api-access-w2x6d\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.554380 5003 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.572073 5003 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.572626 5003 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-e5cf6171-4855-4722-b78a-d87822d3d337" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e5cf6171-4855-4722-b78a-d87822d3d337") on node "crc" Jan 26 11:07:30 crc kubenswrapper[5003]: I0126 11:07:30.655485 5003 reconciler_common.go:293] "Volume detached for volume \"pvc-e5cf6171-4855-4722-b78a-d87822d3d337\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e5cf6171-4855-4722-b78a-d87822d3d337\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:30 crc kubenswrapper[5003]: E0126 11:07:30.959270 5003 configmap.go:193] Couldn't get configMap swift-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 26 11:07:30 crc kubenswrapper[5003]: E0126 11:07:30.959362 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/afd987da-215e-4edc-940c-1529b3531bf1-operator-scripts podName:afd987da-215e-4edc-940c-1529b3531bf1 nodeName:}" failed. No retries permitted until 2026-01-26 11:07:34.959347028 +0000 UTC m=+1470.500572589 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/afd987da-215e-4edc-940c-1529b3531bf1-operator-scripts") pod "keystone2760-account-delete-52t5q" (UID: "afd987da-215e-4edc-940c-1529b3531bf1") : configmap "openstack-scripts" not found Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.016971 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ca7900c-1191-4d34-a44f-29fd6d510d90" path="/var/lib/kubelet/pods/3ca7900c-1191-4d34-a44f-29fd6d510d90/volumes" Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.017866 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7210ca64-60f9-4e11-bd2c-6e4905b0b948" path="/var/lib/kubelet/pods/7210ca64-60f9-4e11-bd2c-6e4905b0b948/volumes" Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.018516 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e9f91e0-3cda-46fd-9034-08b41bf5f546" path="/var/lib/kubelet/pods/8e9f91e0-3cda-46fd-9034-08b41bf5f546/volumes" Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.019789 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c" path="/var/lib/kubelet/pods/93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c/volumes" Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.020411 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7e67ef6-3327-4473-a44e-3c65226ce5db" path="/var/lib/kubelet/pods/a7e67ef6-3327-4473-a44e-3c65226ce5db/volumes" Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.021007 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac2063ed-42ef-4188-826b-a69356f49a65" path="/var/lib/kubelet/pods/ac2063ed-42ef-4188-826b-a69356f49a65/volumes" Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.022517 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e003e40c-8a7e-434a-b20a-1ac895a8d682" path="/var/lib/kubelet/pods/e003e40c-8a7e-434a-b20a-1ac895a8d682/volumes" Jan 26 11:07:31 crc 
kubenswrapper[5003]: I0126 11:07:31.153630 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/openstack-galera-1" Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.263677 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dcxzx\" (UniqueName: \"kubernetes.io/projected/261ebec9-25ad-4434-bf06-3feeee0f0eff-kube-api-access-dcxzx\") pod \"261ebec9-25ad-4434-bf06-3feeee0f0eff\" (UID: \"261ebec9-25ad-4434-bf06-3feeee0f0eff\") " Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.263730 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"261ebec9-25ad-4434-bf06-3feeee0f0eff\" (UID: \"261ebec9-25ad-4434-bf06-3feeee0f0eff\") " Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.263749 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/261ebec9-25ad-4434-bf06-3feeee0f0eff-config-data-default\") pod \"261ebec9-25ad-4434-bf06-3feeee0f0eff\" (UID: \"261ebec9-25ad-4434-bf06-3feeee0f0eff\") " Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.263806 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/261ebec9-25ad-4434-bf06-3feeee0f0eff-config-data-generated\") pod \"261ebec9-25ad-4434-bf06-3feeee0f0eff\" (UID: \"261ebec9-25ad-4434-bf06-3feeee0f0eff\") " Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.263825 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/261ebec9-25ad-4434-bf06-3feeee0f0eff-kolla-config\") pod \"261ebec9-25ad-4434-bf06-3feeee0f0eff\" (UID: \"261ebec9-25ad-4434-bf06-3feeee0f0eff\") " Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.263865 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/261ebec9-25ad-4434-bf06-3feeee0f0eff-operator-scripts\") pod \"261ebec9-25ad-4434-bf06-3feeee0f0eff\" (UID: \"261ebec9-25ad-4434-bf06-3feeee0f0eff\") " Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.264823 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/261ebec9-25ad-4434-bf06-3feeee0f0eff-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "261ebec9-25ad-4434-bf06-3feeee0f0eff" (UID: "261ebec9-25ad-4434-bf06-3feeee0f0eff"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.264883 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/261ebec9-25ad-4434-bf06-3feeee0f0eff-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "261ebec9-25ad-4434-bf06-3feeee0f0eff" (UID: "261ebec9-25ad-4434-bf06-3feeee0f0eff"). InnerVolumeSpecName "config-data-default". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.264890 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/261ebec9-25ad-4434-bf06-3feeee0f0eff-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "261ebec9-25ad-4434-bf06-3feeee0f0eff" (UID: "261ebec9-25ad-4434-bf06-3feeee0f0eff"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.265110 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/261ebec9-25ad-4434-bf06-3feeee0f0eff-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "261ebec9-25ad-4434-bf06-3feeee0f0eff" (UID: "261ebec9-25ad-4434-bf06-3feeee0f0eff"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.267339 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/261ebec9-25ad-4434-bf06-3feeee0f0eff-kube-api-access-dcxzx" (OuterVolumeSpecName: "kube-api-access-dcxzx") pod "261ebec9-25ad-4434-bf06-3feeee0f0eff" (UID: "261ebec9-25ad-4434-bf06-3feeee0f0eff"). InnerVolumeSpecName "kube-api-access-dcxzx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.279055 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "mysql-db") pod "261ebec9-25ad-4434-bf06-3feeee0f0eff" (UID: "261ebec9-25ad-4434-bf06-3feeee0f0eff"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.365099 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dcxzx\" (UniqueName: \"kubernetes.io/projected/261ebec9-25ad-4434-bf06-3feeee0f0eff-kube-api-access-dcxzx\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.365139 5003 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/261ebec9-25ad-4434-bf06-3feeee0f0eff-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.365171 5003 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.365207 5003 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/261ebec9-25ad-4434-bf06-3feeee0f0eff-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.365221 5003 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/261ebec9-25ad-4434-bf06-3feeee0f0eff-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.365232 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/261ebec9-25ad-4434-bf06-3feeee0f0eff-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.366031 5003 generic.go:334] 
"Generic (PLEG): container finished" podID="261ebec9-25ad-4434-bf06-3feeee0f0eff" containerID="cedc05e74c7904ed0bb5572705f2be47c553af9e6652d1bda63282ec2c7dc646" exitCode=0 Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.366104 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/openstack-galera-1" Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.366136 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/rabbitmq-server-0" Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.366109 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-1" event={"ID":"261ebec9-25ad-4434-bf06-3feeee0f0eff","Type":"ContainerDied","Data":"cedc05e74c7904ed0bb5572705f2be47c553af9e6652d1bda63282ec2c7dc646"} Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.366177 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-1" event={"ID":"261ebec9-25ad-4434-bf06-3feeee0f0eff","Type":"ContainerDied","Data":"663c64d0a7a356719ff3d3dc14f6da756a5ee6e383f90eb429faf70ae5011476"} Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.366196 5003 scope.go:117] "RemoveContainer" containerID="cedc05e74c7904ed0bb5572705f2be47c553af9e6652d1bda63282ec2c7dc646" Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.378488 5003 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.388584 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/rabbitmq-server-0"] Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.389688 5003 scope.go:117] "RemoveContainer" containerID="a8e81a795fb5013a8878effc7c783820cdee610ff4de641745e9755bd2139ff2" Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.398560 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/rabbitmq-server-0"] Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.410708 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/openstack-galera-0" podUID="5d8bd836-ef6c-425f-b570-69c53560c715" containerName="galera" containerID="cri-o://5e7e64a851577b3501e476f5005e20e08f836912695da3d57df69b6202080eeb" gracePeriod=26 Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.412570 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/openstack-galera-1"] Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.419273 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/openstack-galera-1"] Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.427577 5003 scope.go:117] "RemoveContainer" containerID="cedc05e74c7904ed0bb5572705f2be47c553af9e6652d1bda63282ec2c7dc646" Jan 26 11:07:31 crc kubenswrapper[5003]: E0126 11:07:31.428024 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cedc05e74c7904ed0bb5572705f2be47c553af9e6652d1bda63282ec2c7dc646\": container with ID starting with cedc05e74c7904ed0bb5572705f2be47c553af9e6652d1bda63282ec2c7dc646 not found: ID does not exist" containerID="cedc05e74c7904ed0bb5572705f2be47c553af9e6652d1bda63282ec2c7dc646" Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.428057 5003 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"cedc05e74c7904ed0bb5572705f2be47c553af9e6652d1bda63282ec2c7dc646"} err="failed to get container status \"cedc05e74c7904ed0bb5572705f2be47c553af9e6652d1bda63282ec2c7dc646\": rpc error: code = NotFound desc = could not find container \"cedc05e74c7904ed0bb5572705f2be47c553af9e6652d1bda63282ec2c7dc646\": container with ID starting with cedc05e74c7904ed0bb5572705f2be47c553af9e6652d1bda63282ec2c7dc646 not found: ID does not exist" Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.428077 5003 scope.go:117] "RemoveContainer" containerID="a8e81a795fb5013a8878effc7c783820cdee610ff4de641745e9755bd2139ff2" Jan 26 11:07:31 crc kubenswrapper[5003]: E0126 11:07:31.428392 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8e81a795fb5013a8878effc7c783820cdee610ff4de641745e9755bd2139ff2\": container with ID starting with a8e81a795fb5013a8878effc7c783820cdee610ff4de641745e9755bd2139ff2 not found: ID does not exist" containerID="a8e81a795fb5013a8878effc7c783820cdee610ff4de641745e9755bd2139ff2" Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.428450 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8e81a795fb5013a8878effc7c783820cdee610ff4de641745e9755bd2139ff2"} err="failed to get container status \"a8e81a795fb5013a8878effc7c783820cdee610ff4de641745e9755bd2139ff2\": rpc error: code = NotFound desc = could not find container \"a8e81a795fb5013a8878effc7c783820cdee610ff4de641745e9755bd2139ff2\": container with ID starting with a8e81a795fb5013a8878effc7c783820cdee610ff4de641745e9755bd2139ff2 not found: ID does not exist" Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.466605 5003 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.501662 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/keystone-db-create-chmzt"] Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.515091 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/keystone-db-create-chmzt"] Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.519826 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5df966c899-zcbvr"] Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.520040 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/swift-operator-controller-manager-5df966c899-zcbvr" podUID="dcce3480-cea7-4075-80c8-60c85f8acdab" containerName="manager" containerID="cri-o://da3c80abe397c27549854d607536ba1e260b8447541d20eb6aa010bc00b7bf2e" gracePeriod=10 Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.537448 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/keystone2760-account-delete-52t5q"] Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.537698 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/keystone2760-account-delete-52t5q" podUID="afd987da-215e-4edc-940c-1529b3531bf1" containerName="mariadb-account-delete" containerID="cri-o://d63d633fd5857b51aa6161ab6b3094bda0ede0e4f9ede3bcf3363f7df4090c1e" gracePeriod=30 Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.555353 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["swift-kuttl-tests/keystone-2760-account-create-update-c4bzl"] Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.562174 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/keystone-2760-account-create-update-c4bzl"] Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.743459 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/swift-operator-index-wp2gn"] Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.743709 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/swift-operator-index-wp2gn" podUID="f6b37c7e-571e-4047-99a1-149f0956e9a8" containerName="registry-server" containerID="cri-o://c64c595414b582e16930d45787a4f77d232dc81218d9da2e1ecbce3093cca17d" gracePeriod=30 Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.792074 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/996dd2d122cc8f9fe59790ac7a034c9f3831d1134aff1130f721ee76b9t5ncz"] Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.808002 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/996dd2d122cc8f9fe59790ac7a034c9f3831d1134aff1130f721ee76b9t5ncz"] Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.907646 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5df966c899-zcbvr" Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.973029 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/dcce3480-cea7-4075-80c8-60c85f8acdab-webhook-cert\") pod \"dcce3480-cea7-4075-80c8-60c85f8acdab\" (UID: \"dcce3480-cea7-4075-80c8-60c85f8acdab\") " Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.973071 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/dcce3480-cea7-4075-80c8-60c85f8acdab-apiservice-cert\") pod \"dcce3480-cea7-4075-80c8-60c85f8acdab\" (UID: \"dcce3480-cea7-4075-80c8-60c85f8acdab\") " Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.973206 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqk7g\" (UniqueName: \"kubernetes.io/projected/dcce3480-cea7-4075-80c8-60c85f8acdab-kube-api-access-fqk7g\") pod \"dcce3480-cea7-4075-80c8-60c85f8acdab\" (UID: \"dcce3480-cea7-4075-80c8-60c85f8acdab\") " Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.978877 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dcce3480-cea7-4075-80c8-60c85f8acdab-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "dcce3480-cea7-4075-80c8-60c85f8acdab" (UID: "dcce3480-cea7-4075-80c8-60c85f8acdab"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.978929 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dcce3480-cea7-4075-80c8-60c85f8acdab-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "dcce3480-cea7-4075-80c8-60c85f8acdab" (UID: "dcce3480-cea7-4075-80c8-60c85f8acdab"). InnerVolumeSpecName "webhook-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:07:31 crc kubenswrapper[5003]: I0126 11:07:31.978965 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dcce3480-cea7-4075-80c8-60c85f8acdab-kube-api-access-fqk7g" (OuterVolumeSpecName: "kube-api-access-fqk7g") pod "dcce3480-cea7-4075-80c8-60c85f8acdab" (UID: "dcce3480-cea7-4075-80c8-60c85f8acdab"). InnerVolumeSpecName "kube-api-access-fqk7g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.077364 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqk7g\" (UniqueName: \"kubernetes.io/projected/dcce3480-cea7-4075-80c8-60c85f8acdab-kube-api-access-fqk7g\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.077412 5003 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/dcce3480-cea7-4075-80c8-60c85f8acdab-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.077427 5003 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/dcce3480-cea7-4075-80c8-60c85f8acdab-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.142645 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-index-wp2gn" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.233752 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/openstack-galera-0" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.282759 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d62gq\" (UniqueName: \"kubernetes.io/projected/5d8bd836-ef6c-425f-b570-69c53560c715-kube-api-access-d62gq\") pod \"5d8bd836-ef6c-425f-b570-69c53560c715\" (UID: \"5d8bd836-ef6c-425f-b570-69c53560c715\") " Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.282834 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5d8bd836-ef6c-425f-b570-69c53560c715-operator-scripts\") pod \"5d8bd836-ef6c-425f-b570-69c53560c715\" (UID: \"5d8bd836-ef6c-425f-b570-69c53560c715\") " Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.282883 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"5d8bd836-ef6c-425f-b570-69c53560c715\" (UID: \"5d8bd836-ef6c-425f-b570-69c53560c715\") " Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.282911 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5d8bd836-ef6c-425f-b570-69c53560c715-config-data-generated\") pod \"5d8bd836-ef6c-425f-b570-69c53560c715\" (UID: \"5d8bd836-ef6c-425f-b570-69c53560c715\") " Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.282982 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mkknw\" (UniqueName: \"kubernetes.io/projected/f6b37c7e-571e-4047-99a1-149f0956e9a8-kube-api-access-mkknw\") pod \"f6b37c7e-571e-4047-99a1-149f0956e9a8\" (UID: \"f6b37c7e-571e-4047-99a1-149f0956e9a8\") " Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 
11:07:32.283007 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5d8bd836-ef6c-425f-b570-69c53560c715-config-data-default\") pod \"5d8bd836-ef6c-425f-b570-69c53560c715\" (UID: \"5d8bd836-ef6c-425f-b570-69c53560c715\") " Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.283029 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5d8bd836-ef6c-425f-b570-69c53560c715-kolla-config\") pod \"5d8bd836-ef6c-425f-b570-69c53560c715\" (UID: \"5d8bd836-ef6c-425f-b570-69c53560c715\") " Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.283909 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d8bd836-ef6c-425f-b570-69c53560c715-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "5d8bd836-ef6c-425f-b570-69c53560c715" (UID: "5d8bd836-ef6c-425f-b570-69c53560c715"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.283973 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d8bd836-ef6c-425f-b570-69c53560c715-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "5d8bd836-ef6c-425f-b570-69c53560c715" (UID: "5d8bd836-ef6c-425f-b570-69c53560c715"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.283998 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d8bd836-ef6c-425f-b570-69c53560c715-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "5d8bd836-ef6c-425f-b570-69c53560c715" (UID: "5d8bd836-ef6c-425f-b570-69c53560c715"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.284186 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d8bd836-ef6c-425f-b570-69c53560c715-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5d8bd836-ef6c-425f-b570-69c53560c715" (UID: "5d8bd836-ef6c-425f-b570-69c53560c715"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.290324 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d8bd836-ef6c-425f-b570-69c53560c715-kube-api-access-d62gq" (OuterVolumeSpecName: "kube-api-access-d62gq") pod "5d8bd836-ef6c-425f-b570-69c53560c715" (UID: "5d8bd836-ef6c-425f-b570-69c53560c715"). InnerVolumeSpecName "kube-api-access-d62gq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.302602 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6b37c7e-571e-4047-99a1-149f0956e9a8-kube-api-access-mkknw" (OuterVolumeSpecName: "kube-api-access-mkknw") pod "f6b37c7e-571e-4047-99a1-149f0956e9a8" (UID: "f6b37c7e-571e-4047-99a1-149f0956e9a8"). InnerVolumeSpecName "kube-api-access-mkknw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.314517 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "mysql-db") pod "5d8bd836-ef6c-425f-b570-69c53560c715" (UID: "5d8bd836-ef6c-425f-b570-69c53560c715"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.375743 5003 generic.go:334] "Generic (PLEG): container finished" podID="dcce3480-cea7-4075-80c8-60c85f8acdab" containerID="da3c80abe397c27549854d607536ba1e260b8447541d20eb6aa010bc00b7bf2e" exitCode=0 Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.375794 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5df966c899-zcbvr" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.375799 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5df966c899-zcbvr" event={"ID":"dcce3480-cea7-4075-80c8-60c85f8acdab","Type":"ContainerDied","Data":"da3c80abe397c27549854d607536ba1e260b8447541d20eb6aa010bc00b7bf2e"} Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.376271 5003 scope.go:117] "RemoveContainer" containerID="da3c80abe397c27549854d607536ba1e260b8447541d20eb6aa010bc00b7bf2e" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.377313 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5df966c899-zcbvr" event={"ID":"dcce3480-cea7-4075-80c8-60c85f8acdab","Type":"ContainerDied","Data":"01ccd6c8b992c588a28c0a05ed2d726f47506ccdc507e12969509e44a963d1a9"} Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.377964 5003 generic.go:334] "Generic (PLEG): container finished" podID="f6b37c7e-571e-4047-99a1-149f0956e9a8" containerID="c64c595414b582e16930d45787a4f77d232dc81218d9da2e1ecbce3093cca17d" exitCode=0 Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.377994 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-index-wp2gn" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.378032 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-index-wp2gn" event={"ID":"f6b37c7e-571e-4047-99a1-149f0956e9a8","Type":"ContainerDied","Data":"c64c595414b582e16930d45787a4f77d232dc81218d9da2e1ecbce3093cca17d"} Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.378056 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-index-wp2gn" event={"ID":"f6b37c7e-571e-4047-99a1-149f0956e9a8","Type":"ContainerDied","Data":"59604d9a33760f03a7e7fb3f5f6f9687725dd9f36e39a908488c8db47376c3b2"} Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.384657 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mkknw\" (UniqueName: \"kubernetes.io/projected/f6b37c7e-571e-4047-99a1-149f0956e9a8-kube-api-access-mkknw\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.384686 5003 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5d8bd836-ef6c-425f-b570-69c53560c715-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.384697 5003 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5d8bd836-ef6c-425f-b570-69c53560c715-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.384725 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d62gq\" (UniqueName: \"kubernetes.io/projected/5d8bd836-ef6c-425f-b570-69c53560c715-kube-api-access-d62gq\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.384733 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5d8bd836-ef6c-425f-b570-69c53560c715-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.384766 5003 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.384776 5003 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5d8bd836-ef6c-425f-b570-69c53560c715-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.391312 5003 generic.go:334] "Generic (PLEG): container finished" podID="5d8bd836-ef6c-425f-b570-69c53560c715" containerID="5e7e64a851577b3501e476f5005e20e08f836912695da3d57df69b6202080eeb" exitCode=0 Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.391368 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-0" event={"ID":"5d8bd836-ef6c-425f-b570-69c53560c715","Type":"ContainerDied","Data":"5e7e64a851577b3501e476f5005e20e08f836912695da3d57df69b6202080eeb"} Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.391400 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-0" event={"ID":"5d8bd836-ef6c-425f-b570-69c53560c715","Type":"ContainerDied","Data":"8c7875a2f296334cefa0a94ad01ba3d72b9e9ca552e67acbcc2cd2ff21230a9a"} Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 
11:07:32.391467 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/openstack-galera-0" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.397812 5003 scope.go:117] "RemoveContainer" containerID="da3c80abe397c27549854d607536ba1e260b8447541d20eb6aa010bc00b7bf2e" Jan 26 11:07:32 crc kubenswrapper[5003]: E0126 11:07:32.398194 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da3c80abe397c27549854d607536ba1e260b8447541d20eb6aa010bc00b7bf2e\": container with ID starting with da3c80abe397c27549854d607536ba1e260b8447541d20eb6aa010bc00b7bf2e not found: ID does not exist" containerID="da3c80abe397c27549854d607536ba1e260b8447541d20eb6aa010bc00b7bf2e" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.398226 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da3c80abe397c27549854d607536ba1e260b8447541d20eb6aa010bc00b7bf2e"} err="failed to get container status \"da3c80abe397c27549854d607536ba1e260b8447541d20eb6aa010bc00b7bf2e\": rpc error: code = NotFound desc = could not find container \"da3c80abe397c27549854d607536ba1e260b8447541d20eb6aa010bc00b7bf2e\": container with ID starting with da3c80abe397c27549854d607536ba1e260b8447541d20eb6aa010bc00b7bf2e not found: ID does not exist" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.398244 5003 scope.go:117] "RemoveContainer" containerID="c64c595414b582e16930d45787a4f77d232dc81218d9da2e1ecbce3093cca17d" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.425973 5003 scope.go:117] "RemoveContainer" containerID="c64c595414b582e16930d45787a4f77d232dc81218d9da2e1ecbce3093cca17d" Jan 26 11:07:32 crc kubenswrapper[5003]: E0126 11:07:32.426745 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c64c595414b582e16930d45787a4f77d232dc81218d9da2e1ecbce3093cca17d\": container with ID starting with c64c595414b582e16930d45787a4f77d232dc81218d9da2e1ecbce3093cca17d not found: ID does not exist" containerID="c64c595414b582e16930d45787a4f77d232dc81218d9da2e1ecbce3093cca17d" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.426779 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c64c595414b582e16930d45787a4f77d232dc81218d9da2e1ecbce3093cca17d"} err="failed to get container status \"c64c595414b582e16930d45787a4f77d232dc81218d9da2e1ecbce3093cca17d\": rpc error: code = NotFound desc = could not find container \"c64c595414b582e16930d45787a4f77d232dc81218d9da2e1ecbce3093cca17d\": container with ID starting with c64c595414b582e16930d45787a4f77d232dc81218d9da2e1ecbce3093cca17d not found: ID does not exist" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.426797 5003 scope.go:117] "RemoveContainer" containerID="5e7e64a851577b3501e476f5005e20e08f836912695da3d57df69b6202080eeb" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.431453 5003 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.443141 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/swift-operator-index-wp2gn"] Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.451412 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/swift-operator-index-wp2gn"] Jan 26 
11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.457148 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5df966c899-zcbvr"] Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.457311 5003 scope.go:117] "RemoveContainer" containerID="694f7f169e16bca9cd4cb5133fadcca45f8dcb20454633bf64bbbe3c7b646022" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.463881 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5df966c899-zcbvr"] Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.472315 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/openstack-galera-0"] Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.479799 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/openstack-galera-0"] Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.481453 5003 scope.go:117] "RemoveContainer" containerID="5e7e64a851577b3501e476f5005e20e08f836912695da3d57df69b6202080eeb" Jan 26 11:07:32 crc kubenswrapper[5003]: E0126 11:07:32.485636 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e7e64a851577b3501e476f5005e20e08f836912695da3d57df69b6202080eeb\": container with ID starting with 5e7e64a851577b3501e476f5005e20e08f836912695da3d57df69b6202080eeb not found: ID does not exist" containerID="5e7e64a851577b3501e476f5005e20e08f836912695da3d57df69b6202080eeb" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.485670 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e7e64a851577b3501e476f5005e20e08f836912695da3d57df69b6202080eeb"} err="failed to get container status \"5e7e64a851577b3501e476f5005e20e08f836912695da3d57df69b6202080eeb\": rpc error: code = NotFound desc = could not find container \"5e7e64a851577b3501e476f5005e20e08f836912695da3d57df69b6202080eeb\": container with ID starting with 5e7e64a851577b3501e476f5005e20e08f836912695da3d57df69b6202080eeb not found: ID does not exist" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.485692 5003 scope.go:117] "RemoveContainer" containerID="694f7f169e16bca9cd4cb5133fadcca45f8dcb20454633bf64bbbe3c7b646022" Jan 26 11:07:32 crc kubenswrapper[5003]: E0126 11:07:32.486048 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"694f7f169e16bca9cd4cb5133fadcca45f8dcb20454633bf64bbbe3c7b646022\": container with ID starting with 694f7f169e16bca9cd4cb5133fadcca45f8dcb20454633bf64bbbe3c7b646022 not found: ID does not exist" containerID="694f7f169e16bca9cd4cb5133fadcca45f8dcb20454633bf64bbbe3c7b646022" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.486076 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"694f7f169e16bca9cd4cb5133fadcca45f8dcb20454633bf64bbbe3c7b646022"} err="failed to get container status \"694f7f169e16bca9cd4cb5133fadcca45f8dcb20454633bf64bbbe3c7b646022\": rpc error: code = NotFound desc = could not find container \"694f7f169e16bca9cd4cb5133fadcca45f8dcb20454633bf64bbbe3c7b646022\": container with ID starting with 694f7f169e16bca9cd4cb5133fadcca45f8dcb20454633bf64bbbe3c7b646022 not found: ID does not exist" Jan 26 11:07:32 crc kubenswrapper[5003]: I0126 11:07:32.488052 5003 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:33 crc kubenswrapper[5003]: I0126 11:07:33.009735 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2" path="/var/lib/kubelet/pods/0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2/volumes" Jan 26 11:07:33 crc kubenswrapper[5003]: I0126 11:07:33.010699 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="261ebec9-25ad-4434-bf06-3feeee0f0eff" path="/var/lib/kubelet/pods/261ebec9-25ad-4434-bf06-3feeee0f0eff/volumes" Jan 26 11:07:33 crc kubenswrapper[5003]: I0126 11:07:33.011566 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="524e27d6-7ca0-4387-a4ad-f32cbe1121c0" path="/var/lib/kubelet/pods/524e27d6-7ca0-4387-a4ad-f32cbe1121c0/volumes" Jan 26 11:07:33 crc kubenswrapper[5003]: I0126 11:07:33.013121 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d8bd836-ef6c-425f-b570-69c53560c715" path="/var/lib/kubelet/pods/5d8bd836-ef6c-425f-b570-69c53560c715/volumes" Jan 26 11:07:33 crc kubenswrapper[5003]: I0126 11:07:33.014012 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77bf3970-44c7-4a45-993c-aeec1a194089" path="/var/lib/kubelet/pods/77bf3970-44c7-4a45-993c-aeec1a194089/volumes" Jan 26 11:07:33 crc kubenswrapper[5003]: I0126 11:07:33.015668 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dcce3480-cea7-4075-80c8-60c85f8acdab" path="/var/lib/kubelet/pods/dcce3480-cea7-4075-80c8-60c85f8acdab/volumes" Jan 26 11:07:33 crc kubenswrapper[5003]: I0126 11:07:33.016595 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e641b453-4123-45f4-8693-5cabd87d872b" path="/var/lib/kubelet/pods/e641b453-4123-45f4-8693-5cabd87d872b/volumes" Jan 26 11:07:33 crc kubenswrapper[5003]: I0126 11:07:33.017266 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6b37c7e-571e-4047-99a1-149f0956e9a8" path="/var/lib/kubelet/pods/f6b37c7e-571e-4047-99a1-149f0956e9a8/volumes" Jan 26 11:07:34 crc kubenswrapper[5003]: I0126 11:07:34.088938 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-759678c54b-zj6mg"] Jan 26 11:07:34 crc kubenswrapper[5003]: I0126 11:07:34.089555 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/barbican-operator-controller-manager-759678c54b-zj6mg" podUID="1fedfbd6-7026-49ff-b4cd-bc52a093e02a" containerName="manager" containerID="cri-o://8360016c2d0f656d8d226388fceecd8f45d0f8acf100692ff2ffbf98ad3c54d8" gracePeriod=10 Jan 26 11:07:34 crc kubenswrapper[5003]: I0126 11:07:34.365603 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/barbican-operator-index-r8mc9"] Jan 26 11:07:34 crc kubenswrapper[5003]: I0126 11:07:34.366208 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/barbican-operator-index-r8mc9" podUID="68526bd8-8a1a-478b-8b12-3333bcaf29c8" containerName="registry-server" containerID="cri-o://ad7900e76bc0190c436d8833b604faecd69b1b1692016a52a835325f33513b40" gracePeriod=30 Jan 26 11:07:34 crc kubenswrapper[5003]: I0126 11:07:34.405074 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5"] Jan 26 11:07:34 crc kubenswrapper[5003]: I0126 11:07:34.409292 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack-operators/70ec14eae73ebaae49fdbc61e5bed61c754e6936ca4cc9199fb9707139h74m5"] Jan 26 11:07:34 crc kubenswrapper[5003]: I0126 11:07:34.414117 5003 generic.go:334] "Generic (PLEG): container finished" podID="1fedfbd6-7026-49ff-b4cd-bc52a093e02a" containerID="8360016c2d0f656d8d226388fceecd8f45d0f8acf100692ff2ffbf98ad3c54d8" exitCode=0 Jan 26 11:07:34 crc kubenswrapper[5003]: I0126 11:07:34.414168 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-759678c54b-zj6mg" event={"ID":"1fedfbd6-7026-49ff-b4cd-bc52a093e02a","Type":"ContainerDied","Data":"8360016c2d0f656d8d226388fceecd8f45d0f8acf100692ff2ffbf98ad3c54d8"} Jan 26 11:07:34 crc kubenswrapper[5003]: I0126 11:07:34.537734 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-759678c54b-zj6mg" Jan 26 11:07:34 crc kubenswrapper[5003]: I0126 11:07:34.624343 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vqblg\" (UniqueName: \"kubernetes.io/projected/1fedfbd6-7026-49ff-b4cd-bc52a093e02a-kube-api-access-vqblg\") pod \"1fedfbd6-7026-49ff-b4cd-bc52a093e02a\" (UID: \"1fedfbd6-7026-49ff-b4cd-bc52a093e02a\") " Jan 26 11:07:34 crc kubenswrapper[5003]: I0126 11:07:34.624677 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1fedfbd6-7026-49ff-b4cd-bc52a093e02a-apiservice-cert\") pod \"1fedfbd6-7026-49ff-b4cd-bc52a093e02a\" (UID: \"1fedfbd6-7026-49ff-b4cd-bc52a093e02a\") " Jan 26 11:07:34 crc kubenswrapper[5003]: I0126 11:07:34.624703 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1fedfbd6-7026-49ff-b4cd-bc52a093e02a-webhook-cert\") pod \"1fedfbd6-7026-49ff-b4cd-bc52a093e02a\" (UID: \"1fedfbd6-7026-49ff-b4cd-bc52a093e02a\") " Jan 26 11:07:34 crc kubenswrapper[5003]: I0126 11:07:34.631291 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fedfbd6-7026-49ff-b4cd-bc52a093e02a-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "1fedfbd6-7026-49ff-b4cd-bc52a093e02a" (UID: "1fedfbd6-7026-49ff-b4cd-bc52a093e02a"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:07:34 crc kubenswrapper[5003]: I0126 11:07:34.631409 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fedfbd6-7026-49ff-b4cd-bc52a093e02a-kube-api-access-vqblg" (OuterVolumeSpecName: "kube-api-access-vqblg") pod "1fedfbd6-7026-49ff-b4cd-bc52a093e02a" (UID: "1fedfbd6-7026-49ff-b4cd-bc52a093e02a"). InnerVolumeSpecName "kube-api-access-vqblg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:07:34 crc kubenswrapper[5003]: I0126 11:07:34.634401 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fedfbd6-7026-49ff-b4cd-bc52a093e02a-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "1fedfbd6-7026-49ff-b4cd-bc52a093e02a" (UID: "1fedfbd6-7026-49ff-b4cd-bc52a093e02a"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:07:34 crc kubenswrapper[5003]: I0126 11:07:34.727104 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vqblg\" (UniqueName: \"kubernetes.io/projected/1fedfbd6-7026-49ff-b4cd-bc52a093e02a-kube-api-access-vqblg\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:34 crc kubenswrapper[5003]: I0126 11:07:34.727322 5003 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1fedfbd6-7026-49ff-b4cd-bc52a093e02a-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:34 crc kubenswrapper[5003]: I0126 11:07:34.727338 5003 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1fedfbd6-7026-49ff-b4cd-bc52a093e02a-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:34 crc kubenswrapper[5003]: I0126 11:07:34.770066 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-index-r8mc9" Jan 26 11:07:34 crc kubenswrapper[5003]: I0126 11:07:34.827888 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvz8c\" (UniqueName: \"kubernetes.io/projected/68526bd8-8a1a-478b-8b12-3333bcaf29c8-kube-api-access-kvz8c\") pod \"68526bd8-8a1a-478b-8b12-3333bcaf29c8\" (UID: \"68526bd8-8a1a-478b-8b12-3333bcaf29c8\") " Jan 26 11:07:34 crc kubenswrapper[5003]: I0126 11:07:34.831472 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68526bd8-8a1a-478b-8b12-3333bcaf29c8-kube-api-access-kvz8c" (OuterVolumeSpecName: "kube-api-access-kvz8c") pod "68526bd8-8a1a-478b-8b12-3333bcaf29c8" (UID: "68526bd8-8a1a-478b-8b12-3333bcaf29c8"). InnerVolumeSpecName "kube-api-access-kvz8c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:07:34 crc kubenswrapper[5003]: I0126 11:07:34.929443 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvz8c\" (UniqueName: \"kubernetes.io/projected/68526bd8-8a1a-478b-8b12-3333bcaf29c8-kube-api-access-kvz8c\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:35 crc kubenswrapper[5003]: I0126 11:07:35.008879 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cedfada-2004-4908-b8eb-cbc066c92dd9" path="/var/lib/kubelet/pods/8cedfada-2004-4908-b8eb-cbc066c92dd9/volumes" Jan 26 11:07:35 crc kubenswrapper[5003]: E0126 11:07:35.030656 5003 configmap.go:193] Couldn't get configMap swift-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 26 11:07:35 crc kubenswrapper[5003]: E0126 11:07:35.030716 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/afd987da-215e-4edc-940c-1529b3531bf1-operator-scripts podName:afd987da-215e-4edc-940c-1529b3531bf1 nodeName:}" failed. No retries permitted until 2026-01-26 11:07:43.030703381 +0000 UTC m=+1478.571928942 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/afd987da-215e-4edc-940c-1529b3531bf1-operator-scripts") pod "keystone2760-account-delete-52t5q" (UID: "afd987da-215e-4edc-940c-1529b3531bf1") : configmap "openstack-scripts" not found Jan 26 11:07:35 crc kubenswrapper[5003]: I0126 11:07:35.425806 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-759678c54b-zj6mg" event={"ID":"1fedfbd6-7026-49ff-b4cd-bc52a093e02a","Type":"ContainerDied","Data":"6a2e4058eed0ace21adc161fc1863f327601179241e40a0921b449cb01448749"} Jan 26 11:07:35 crc kubenswrapper[5003]: I0126 11:07:35.425866 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-759678c54b-zj6mg" Jan 26 11:07:35 crc kubenswrapper[5003]: I0126 11:07:35.425876 5003 scope.go:117] "RemoveContainer" containerID="8360016c2d0f656d8d226388fceecd8f45d0f8acf100692ff2ffbf98ad3c54d8" Jan 26 11:07:35 crc kubenswrapper[5003]: I0126 11:07:35.432438 5003 generic.go:334] "Generic (PLEG): container finished" podID="68526bd8-8a1a-478b-8b12-3333bcaf29c8" containerID="ad7900e76bc0190c436d8833b604faecd69b1b1692016a52a835325f33513b40" exitCode=0 Jan 26 11:07:35 crc kubenswrapper[5003]: I0126 11:07:35.432478 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-index-r8mc9" event={"ID":"68526bd8-8a1a-478b-8b12-3333bcaf29c8","Type":"ContainerDied","Data":"ad7900e76bc0190c436d8833b604faecd69b1b1692016a52a835325f33513b40"} Jan 26 11:07:35 crc kubenswrapper[5003]: I0126 11:07:35.432504 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-index-r8mc9" event={"ID":"68526bd8-8a1a-478b-8b12-3333bcaf29c8","Type":"ContainerDied","Data":"a7b4b45a7dba931eb3df6ce15abed7f0f4c1971d93dc90adfe9c36cd25acf9c1"} Jan 26 11:07:35 crc kubenswrapper[5003]: I0126 11:07:35.432529 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-index-r8mc9" Jan 26 11:07:35 crc kubenswrapper[5003]: I0126 11:07:35.461712 5003 scope.go:117] "RemoveContainer" containerID="ad7900e76bc0190c436d8833b604faecd69b1b1692016a52a835325f33513b40" Jan 26 11:07:35 crc kubenswrapper[5003]: I0126 11:07:35.464123 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-759678c54b-zj6mg"] Jan 26 11:07:35 crc kubenswrapper[5003]: I0126 11:07:35.472918 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-759678c54b-zj6mg"] Jan 26 11:07:35 crc kubenswrapper[5003]: I0126 11:07:35.485141 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/barbican-operator-index-r8mc9"] Jan 26 11:07:35 crc kubenswrapper[5003]: I0126 11:07:35.488445 5003 scope.go:117] "RemoveContainer" containerID="ad7900e76bc0190c436d8833b604faecd69b1b1692016a52a835325f33513b40" Jan 26 11:07:35 crc kubenswrapper[5003]: E0126 11:07:35.489103 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad7900e76bc0190c436d8833b604faecd69b1b1692016a52a835325f33513b40\": container with ID starting with ad7900e76bc0190c436d8833b604faecd69b1b1692016a52a835325f33513b40 not found: ID does not exist" containerID="ad7900e76bc0190c436d8833b604faecd69b1b1692016a52a835325f33513b40" Jan 26 11:07:35 crc kubenswrapper[5003]: I0126 11:07:35.489140 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad7900e76bc0190c436d8833b604faecd69b1b1692016a52a835325f33513b40"} err="failed to get container status \"ad7900e76bc0190c436d8833b604faecd69b1b1692016a52a835325f33513b40\": rpc error: code = NotFound desc = could not find container \"ad7900e76bc0190c436d8833b604faecd69b1b1692016a52a835325f33513b40\": container with ID starting with ad7900e76bc0190c436d8833b604faecd69b1b1692016a52a835325f33513b40 not found: ID does not exist" Jan 26 11:07:35 crc kubenswrapper[5003]: I0126 11:07:35.490957 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/barbican-operator-index-r8mc9"] Jan 26 11:07:36 crc kubenswrapper[5003]: I0126 11:07:36.559049 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-55585fc49f-hpp2z"] Jan 26 11:07:36 crc kubenswrapper[5003]: I0126 11:07:36.561423 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/keystone-operator-controller-manager-55585fc49f-hpp2z" podUID="9587e58d-66ff-4a24-8373-58c7d6946575" containerName="manager" containerID="cri-o://9da371554c48247932aa231a19092402dbe8717483c4944772e390d2198f6350" gracePeriod=10 Jan 26 11:07:36 crc kubenswrapper[5003]: I0126 11:07:36.605920 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-operators/keystone-operator-controller-manager-55585fc49f-hpp2z" podUID="9587e58d-66ff-4a24-8373-58c7d6946575" containerName="manager" probeResult="failure" output="Get \"http://10.217.0.78:8081/readyz\": dial tcp 10.217.0.78:8081: connect: connection refused" Jan 26 11:07:36 crc kubenswrapper[5003]: I0126 11:07:36.778164 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-index-sfj7j"] Jan 26 11:07:36 crc kubenswrapper[5003]: I0126 11:07:36.778430 5003 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack-operators/keystone-operator-index-sfj7j" podUID="a8d40265-176c-4b0c-add2-7d7ec6c76f50" containerName="registry-server" containerID="cri-o://0327a4ed19740d5b06e261c488e7d365c8f51c877e32881d680e5f639b981702" gracePeriod=30 Jan 26 11:07:36 crc kubenswrapper[5003]: I0126 11:07:36.815013 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/810ee47ade577678291ae176be4e20f603ed854e1e1c3c96e3f3e8130ehm899"] Jan 26 11:07:36 crc kubenswrapper[5003]: I0126 11:07:36.820733 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/810ee47ade577678291ae176be4e20f603ed854e1e1c3c96e3f3e8130ehm899"] Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.009680 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1fedfbd6-7026-49ff-b4cd-bc52a093e02a" path="/var/lib/kubelet/pods/1fedfbd6-7026-49ff-b4cd-bc52a093e02a/volumes" Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.010199 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33f7fd58-e985-460d-a939-7b9bf745fdae" path="/var/lib/kubelet/pods/33f7fd58-e985-460d-a939-7b9bf745fdae/volumes" Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.010825 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68526bd8-8a1a-478b-8b12-3333bcaf29c8" path="/var/lib/kubelet/pods/68526bd8-8a1a-478b-8b12-3333bcaf29c8/volumes" Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.104081 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-55585fc49f-hpp2z" Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.159598 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9587e58d-66ff-4a24-8373-58c7d6946575-webhook-cert\") pod \"9587e58d-66ff-4a24-8373-58c7d6946575\" (UID: \"9587e58d-66ff-4a24-8373-58c7d6946575\") " Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.159672 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w2pvp\" (UniqueName: \"kubernetes.io/projected/9587e58d-66ff-4a24-8373-58c7d6946575-kube-api-access-w2pvp\") pod \"9587e58d-66ff-4a24-8373-58c7d6946575\" (UID: \"9587e58d-66ff-4a24-8373-58c7d6946575\") " Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.159778 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9587e58d-66ff-4a24-8373-58c7d6946575-apiservice-cert\") pod \"9587e58d-66ff-4a24-8373-58c7d6946575\" (UID: \"9587e58d-66ff-4a24-8373-58c7d6946575\") " Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.164346 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9587e58d-66ff-4a24-8373-58c7d6946575-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "9587e58d-66ff-4a24-8373-58c7d6946575" (UID: "9587e58d-66ff-4a24-8373-58c7d6946575"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.169496 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9587e58d-66ff-4a24-8373-58c7d6946575-kube-api-access-w2pvp" (OuterVolumeSpecName: "kube-api-access-w2pvp") pod "9587e58d-66ff-4a24-8373-58c7d6946575" (UID: "9587e58d-66ff-4a24-8373-58c7d6946575"). InnerVolumeSpecName "kube-api-access-w2pvp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.181548 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9587e58d-66ff-4a24-8373-58c7d6946575-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "9587e58d-66ff-4a24-8373-58c7d6946575" (UID: "9587e58d-66ff-4a24-8373-58c7d6946575"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:07:37 crc kubenswrapper[5003]: E0126 11:07:37.216906 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 0327a4ed19740d5b06e261c488e7d365c8f51c877e32881d680e5f639b981702 is running failed: container process not found" containerID="0327a4ed19740d5b06e261c488e7d365c8f51c877e32881d680e5f639b981702" cmd=["grpc_health_probe","-addr=:50051"] Jan 26 11:07:37 crc kubenswrapper[5003]: E0126 11:07:37.218869 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 0327a4ed19740d5b06e261c488e7d365c8f51c877e32881d680e5f639b981702 is running failed: container process not found" containerID="0327a4ed19740d5b06e261c488e7d365c8f51c877e32881d680e5f639b981702" cmd=["grpc_health_probe","-addr=:50051"] Jan 26 11:07:37 crc kubenswrapper[5003]: E0126 11:07:37.219144 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 0327a4ed19740d5b06e261c488e7d365c8f51c877e32881d680e5f639b981702 is running failed: container process not found" containerID="0327a4ed19740d5b06e261c488e7d365c8f51c877e32881d680e5f639b981702" cmd=["grpc_health_probe","-addr=:50051"] Jan 26 11:07:37 crc kubenswrapper[5003]: E0126 11:07:37.219184 5003 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 0327a4ed19740d5b06e261c488e7d365c8f51c877e32881d680e5f639b981702 is running failed: container process not found" probeType="Readiness" pod="openstack-operators/keystone-operator-index-sfj7j" podUID="a8d40265-176c-4b0c-add2-7d7ec6c76f50" containerName="registry-server" Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.261525 5003 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9587e58d-66ff-4a24-8373-58c7d6946575-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.261554 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w2pvp\" (UniqueName: \"kubernetes.io/projected/9587e58d-66ff-4a24-8373-58c7d6946575-kube-api-access-w2pvp\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.261565 5003 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9587e58d-66ff-4a24-8373-58c7d6946575-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.344760 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-sfj7j" Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.453665 5003 generic.go:334] "Generic (PLEG): container finished" podID="a8d40265-176c-4b0c-add2-7d7ec6c76f50" containerID="0327a4ed19740d5b06e261c488e7d365c8f51c877e32881d680e5f639b981702" exitCode=0 Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.453713 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-index-sfj7j" Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.453715 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-sfj7j" event={"ID":"a8d40265-176c-4b0c-add2-7d7ec6c76f50","Type":"ContainerDied","Data":"0327a4ed19740d5b06e261c488e7d365c8f51c877e32881d680e5f639b981702"} Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.453764 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-sfj7j" event={"ID":"a8d40265-176c-4b0c-add2-7d7ec6c76f50","Type":"ContainerDied","Data":"4af33e1aece999f669fd0d0ea76b32dd4c0c2644e04cd449376596b997022b27"} Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.453786 5003 scope.go:117] "RemoveContainer" containerID="0327a4ed19740d5b06e261c488e7d365c8f51c877e32881d680e5f639b981702" Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.455553 5003 generic.go:334] "Generic (PLEG): container finished" podID="9587e58d-66ff-4a24-8373-58c7d6946575" containerID="9da371554c48247932aa231a19092402dbe8717483c4944772e390d2198f6350" exitCode=0 Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.455594 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-55585fc49f-hpp2z" Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.455600 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-55585fc49f-hpp2z" event={"ID":"9587e58d-66ff-4a24-8373-58c7d6946575","Type":"ContainerDied","Data":"9da371554c48247932aa231a19092402dbe8717483c4944772e390d2198f6350"} Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.455628 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-55585fc49f-hpp2z" event={"ID":"9587e58d-66ff-4a24-8373-58c7d6946575","Type":"ContainerDied","Data":"2f29a578dce0531e6470202e3db1578b6d6bf0c5634ab34fb4b178f6e0d4e632"} Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.463455 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fbv6m\" (UniqueName: \"kubernetes.io/projected/a8d40265-176c-4b0c-add2-7d7ec6c76f50-kube-api-access-fbv6m\") pod \"a8d40265-176c-4b0c-add2-7d7ec6c76f50\" (UID: \"a8d40265-176c-4b0c-add2-7d7ec6c76f50\") " Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.467808 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8d40265-176c-4b0c-add2-7d7ec6c76f50-kube-api-access-fbv6m" (OuterVolumeSpecName: "kube-api-access-fbv6m") pod "a8d40265-176c-4b0c-add2-7d7ec6c76f50" (UID: "a8d40265-176c-4b0c-add2-7d7ec6c76f50"). InnerVolumeSpecName "kube-api-access-fbv6m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.475044 5003 scope.go:117] "RemoveContainer" containerID="0327a4ed19740d5b06e261c488e7d365c8f51c877e32881d680e5f639b981702" Jan 26 11:07:37 crc kubenswrapper[5003]: E0126 11:07:37.479148 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0327a4ed19740d5b06e261c488e7d365c8f51c877e32881d680e5f639b981702\": container with ID starting with 0327a4ed19740d5b06e261c488e7d365c8f51c877e32881d680e5f639b981702 not found: ID does not exist" containerID="0327a4ed19740d5b06e261c488e7d365c8f51c877e32881d680e5f639b981702" Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.479418 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0327a4ed19740d5b06e261c488e7d365c8f51c877e32881d680e5f639b981702"} err="failed to get container status \"0327a4ed19740d5b06e261c488e7d365c8f51c877e32881d680e5f639b981702\": rpc error: code = NotFound desc = could not find container \"0327a4ed19740d5b06e261c488e7d365c8f51c877e32881d680e5f639b981702\": container with ID starting with 0327a4ed19740d5b06e261c488e7d365c8f51c877e32881d680e5f639b981702 not found: ID does not exist" Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.479448 5003 scope.go:117] "RemoveContainer" containerID="9da371554c48247932aa231a19092402dbe8717483c4944772e390d2198f6350" Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.493570 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-55585fc49f-hpp2z"] Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.497643 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-55585fc49f-hpp2z"] Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.500000 5003 scope.go:117] "RemoveContainer" containerID="9da371554c48247932aa231a19092402dbe8717483c4944772e390d2198f6350" Jan 26 11:07:37 crc kubenswrapper[5003]: E0126 11:07:37.500395 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9da371554c48247932aa231a19092402dbe8717483c4944772e390d2198f6350\": container with ID starting with 9da371554c48247932aa231a19092402dbe8717483c4944772e390d2198f6350 not found: ID does not exist" containerID="9da371554c48247932aa231a19092402dbe8717483c4944772e390d2198f6350" Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.500453 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9da371554c48247932aa231a19092402dbe8717483c4944772e390d2198f6350"} err="failed to get container status \"9da371554c48247932aa231a19092402dbe8717483c4944772e390d2198f6350\": rpc error: code = NotFound desc = could not find container \"9da371554c48247932aa231a19092402dbe8717483c4944772e390d2198f6350\": container with ID starting with 9da371554c48247932aa231a19092402dbe8717483c4944772e390d2198f6350 not found: ID does not exist" Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.564801 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fbv6m\" (UniqueName: \"kubernetes.io/projected/a8d40265-176c-4b0c-add2-7d7ec6c76f50-kube-api-access-fbv6m\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.783085 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack-operators/keystone-operator-index-sfj7j"] Jan 26 11:07:37 crc kubenswrapper[5003]: I0126 11:07:37.790605 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/keystone-operator-index-sfj7j"] Jan 26 11:07:39 crc kubenswrapper[5003]: I0126 11:07:39.009665 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9587e58d-66ff-4a24-8373-58c7d6946575" path="/var/lib/kubelet/pods/9587e58d-66ff-4a24-8373-58c7d6946575/volumes" Jan 26 11:07:39 crc kubenswrapper[5003]: I0126 11:07:39.010673 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8d40265-176c-4b0c-add2-7d7ec6c76f50" path="/var/lib/kubelet/pods/a8d40265-176c-4b0c-add2-7d7ec6c76f50/volumes" Jan 26 11:07:39 crc kubenswrapper[5003]: I0126 11:07:39.039840 5003 patch_prober.go:28] interesting pod/machine-config-daemon-m84kp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 11:07:39 crc kubenswrapper[5003]: I0126 11:07:39.039910 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 11:07:39 crc kubenswrapper[5003]: I0126 11:07:39.398716 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-8r4s4"] Jan 26 11:07:39 crc kubenswrapper[5003]: I0126 11:07:39.398946 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-8r4s4" podUID="5479a1b5-f4d1-4ff7-a602-c1cd068f3ee7" containerName="operator" containerID="cri-o://a9685b78c4e88b471c0aa7ce7b959d10b235efab5ab2506ee634a820e1f0d5d5" gracePeriod=10 Jan 26 11:07:39 crc kubenswrapper[5003]: I0126 11:07:39.597865 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-tqpvl"] Jan 26 11:07:39 crc kubenswrapper[5003]: I0126 11:07:39.598061 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/rabbitmq-cluster-operator-index-tqpvl" podUID="7e38fee1-5f72-4d2e-9db9-64bd94887318" containerName="registry-server" containerID="cri-o://43d856949d1978ceabac3af3e00f3761391235db9e6583d9774d2849d7e3c64f" gracePeriod=30 Jan 26 11:07:39 crc kubenswrapper[5003]: I0126 11:07:39.650210 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590qfdcp"] Jan 26 11:07:39 crc kubenswrapper[5003]: I0126 11:07:39.653544 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590qfdcp"] Jan 26 11:07:39 crc kubenswrapper[5003]: I0126 11:07:39.766313 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-8r4s4" Jan 26 11:07:39 crc kubenswrapper[5003]: I0126 11:07:39.897564 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7dzqx\" (UniqueName: \"kubernetes.io/projected/5479a1b5-f4d1-4ff7-a602-c1cd068f3ee7-kube-api-access-7dzqx\") pod \"5479a1b5-f4d1-4ff7-a602-c1cd068f3ee7\" (UID: \"5479a1b5-f4d1-4ff7-a602-c1cd068f3ee7\") " Jan 26 11:07:39 crc kubenswrapper[5003]: I0126 11:07:39.930151 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5479a1b5-f4d1-4ff7-a602-c1cd068f3ee7-kube-api-access-7dzqx" (OuterVolumeSpecName: "kube-api-access-7dzqx") pod "5479a1b5-f4d1-4ff7-a602-c1cd068f3ee7" (UID: "5479a1b5-f4d1-4ff7-a602-c1cd068f3ee7"). InnerVolumeSpecName "kube-api-access-7dzqx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:07:40 crc kubenswrapper[5003]: I0126 11:07:40.000180 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7dzqx\" (UniqueName: \"kubernetes.io/projected/5479a1b5-f4d1-4ff7-a602-c1cd068f3ee7-kube-api-access-7dzqx\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:40 crc kubenswrapper[5003]: I0126 11:07:40.031692 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-tqpvl" Jan 26 11:07:40 crc kubenswrapper[5003]: I0126 11:07:40.101920 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2gzgp\" (UniqueName: \"kubernetes.io/projected/7e38fee1-5f72-4d2e-9db9-64bd94887318-kube-api-access-2gzgp\") pod \"7e38fee1-5f72-4d2e-9db9-64bd94887318\" (UID: \"7e38fee1-5f72-4d2e-9db9-64bd94887318\") " Jan 26 11:07:40 crc kubenswrapper[5003]: I0126 11:07:40.104306 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e38fee1-5f72-4d2e-9db9-64bd94887318-kube-api-access-2gzgp" (OuterVolumeSpecName: "kube-api-access-2gzgp") pod "7e38fee1-5f72-4d2e-9db9-64bd94887318" (UID: "7e38fee1-5f72-4d2e-9db9-64bd94887318"). InnerVolumeSpecName "kube-api-access-2gzgp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:07:40 crc kubenswrapper[5003]: I0126 11:07:40.203975 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2gzgp\" (UniqueName: \"kubernetes.io/projected/7e38fee1-5f72-4d2e-9db9-64bd94887318-kube-api-access-2gzgp\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:40 crc kubenswrapper[5003]: I0126 11:07:40.492652 5003 generic.go:334] "Generic (PLEG): container finished" podID="5479a1b5-f4d1-4ff7-a602-c1cd068f3ee7" containerID="a9685b78c4e88b471c0aa7ce7b959d10b235efab5ab2506ee634a820e1f0d5d5" exitCode=0 Jan 26 11:07:40 crc kubenswrapper[5003]: I0126 11:07:40.492747 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-8r4s4" event={"ID":"5479a1b5-f4d1-4ff7-a602-c1cd068f3ee7","Type":"ContainerDied","Data":"a9685b78c4e88b471c0aa7ce7b959d10b235efab5ab2506ee634a820e1f0d5d5"} Jan 26 11:07:40 crc kubenswrapper[5003]: I0126 11:07:40.492750 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-8r4s4" Jan 26 11:07:40 crc kubenswrapper[5003]: I0126 11:07:40.492787 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-8r4s4" event={"ID":"5479a1b5-f4d1-4ff7-a602-c1cd068f3ee7","Type":"ContainerDied","Data":"3f29437fde206ef86a5a8edc28a6b35cf22b6f0c4291c78054bad9772d6e9eac"} Jan 26 11:07:40 crc kubenswrapper[5003]: I0126 11:07:40.492812 5003 scope.go:117] "RemoveContainer" containerID="a9685b78c4e88b471c0aa7ce7b959d10b235efab5ab2506ee634a820e1f0d5d5" Jan 26 11:07:40 crc kubenswrapper[5003]: I0126 11:07:40.496743 5003 generic.go:334] "Generic (PLEG): container finished" podID="7e38fee1-5f72-4d2e-9db9-64bd94887318" containerID="43d856949d1978ceabac3af3e00f3761391235db9e6583d9774d2849d7e3c64f" exitCode=0 Jan 26 11:07:40 crc kubenswrapper[5003]: I0126 11:07:40.496785 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-tqpvl" event={"ID":"7e38fee1-5f72-4d2e-9db9-64bd94887318","Type":"ContainerDied","Data":"43d856949d1978ceabac3af3e00f3761391235db9e6583d9774d2849d7e3c64f"} Jan 26 11:07:40 crc kubenswrapper[5003]: I0126 11:07:40.496809 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-tqpvl" event={"ID":"7e38fee1-5f72-4d2e-9db9-64bd94887318","Type":"ContainerDied","Data":"a8cbcf74c56158a2a3bcab0cc4ab20e1abf0ce01d8abc64e6daf815f472693f8"} Jan 26 11:07:40 crc kubenswrapper[5003]: I0126 11:07:40.496856 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-tqpvl" Jan 26 11:07:40 crc kubenswrapper[5003]: I0126 11:07:40.521596 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-8r4s4"] Jan 26 11:07:40 crc kubenswrapper[5003]: I0126 11:07:40.526925 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-8r4s4"] Jan 26 11:07:40 crc kubenswrapper[5003]: I0126 11:07:40.544652 5003 scope.go:117] "RemoveContainer" containerID="a9685b78c4e88b471c0aa7ce7b959d10b235efab5ab2506ee634a820e1f0d5d5" Jan 26 11:07:40 crc kubenswrapper[5003]: E0126 11:07:40.545180 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9685b78c4e88b471c0aa7ce7b959d10b235efab5ab2506ee634a820e1f0d5d5\": container with ID starting with a9685b78c4e88b471c0aa7ce7b959d10b235efab5ab2506ee634a820e1f0d5d5 not found: ID does not exist" containerID="a9685b78c4e88b471c0aa7ce7b959d10b235efab5ab2506ee634a820e1f0d5d5" Jan 26 11:07:40 crc kubenswrapper[5003]: I0126 11:07:40.545219 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9685b78c4e88b471c0aa7ce7b959d10b235efab5ab2506ee634a820e1f0d5d5"} err="failed to get container status \"a9685b78c4e88b471c0aa7ce7b959d10b235efab5ab2506ee634a820e1f0d5d5\": rpc error: code = NotFound desc = could not find container \"a9685b78c4e88b471c0aa7ce7b959d10b235efab5ab2506ee634a820e1f0d5d5\": container with ID starting with a9685b78c4e88b471c0aa7ce7b959d10b235efab5ab2506ee634a820e1f0d5d5 not found: ID does not exist" Jan 26 11:07:40 crc kubenswrapper[5003]: I0126 11:07:40.545241 5003 scope.go:117] "RemoveContainer" containerID="43d856949d1978ceabac3af3e00f3761391235db9e6583d9774d2849d7e3c64f" Jan 26 11:07:40 crc 
kubenswrapper[5003]: I0126 11:07:40.555787 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-tqpvl"] Jan 26 11:07:40 crc kubenswrapper[5003]: I0126 11:07:40.565760 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-tqpvl"] Jan 26 11:07:40 crc kubenswrapper[5003]: I0126 11:07:40.570825 5003 scope.go:117] "RemoveContainer" containerID="43d856949d1978ceabac3af3e00f3761391235db9e6583d9774d2849d7e3c64f" Jan 26 11:07:40 crc kubenswrapper[5003]: E0126 11:07:40.572654 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43d856949d1978ceabac3af3e00f3761391235db9e6583d9774d2849d7e3c64f\": container with ID starting with 43d856949d1978ceabac3af3e00f3761391235db9e6583d9774d2849d7e3c64f not found: ID does not exist" containerID="43d856949d1978ceabac3af3e00f3761391235db9e6583d9774d2849d7e3c64f" Jan 26 11:07:40 crc kubenswrapper[5003]: I0126 11:07:40.572696 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43d856949d1978ceabac3af3e00f3761391235db9e6583d9774d2849d7e3c64f"} err="failed to get container status \"43d856949d1978ceabac3af3e00f3761391235db9e6583d9774d2849d7e3c64f\": rpc error: code = NotFound desc = could not find container \"43d856949d1978ceabac3af3e00f3761391235db9e6583d9774d2849d7e3c64f\": container with ID starting with 43d856949d1978ceabac3af3e00f3761391235db9e6583d9774d2849d7e3c64f not found: ID does not exist" Jan 26 11:07:41 crc kubenswrapper[5003]: I0126 11:07:41.009603 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5479a1b5-f4d1-4ff7-a602-c1cd068f3ee7" path="/var/lib/kubelet/pods/5479a1b5-f4d1-4ff7-a602-c1cd068f3ee7/volumes" Jan 26 11:07:41 crc kubenswrapper[5003]: I0126 11:07:41.010221 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e38fee1-5f72-4d2e-9db9-64bd94887318" path="/var/lib/kubelet/pods/7e38fee1-5f72-4d2e-9db9-64bd94887318/volumes" Jan 26 11:07:41 crc kubenswrapper[5003]: I0126 11:07:41.010928 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9532cd56-bc22-4155-ab14-7fcdc05a4748" path="/var/lib/kubelet/pods/9532cd56-bc22-4155-ab14-7fcdc05a4748/volumes" Jan 26 11:07:43 crc kubenswrapper[5003]: E0126 11:07:43.038788 5003 configmap.go:193] Couldn't get configMap swift-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 26 11:07:43 crc kubenswrapper[5003]: E0126 11:07:43.039105 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/afd987da-215e-4edc-940c-1529b3531bf1-operator-scripts podName:afd987da-215e-4edc-940c-1529b3531bf1 nodeName:}" failed. No retries permitted until 2026-01-26 11:07:59.039087155 +0000 UTC m=+1494.580312716 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/afd987da-215e-4edc-940c-1529b3531bf1-operator-scripts") pod "keystone2760-account-delete-52t5q" (UID: "afd987da-215e-4edc-940c-1529b3531bf1") : configmap "openstack-scripts" not found Jan 26 11:07:44 crc kubenswrapper[5003]: I0126 11:07:44.550489 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-controller-manager-fc4cd6d84-shrbc"] Jan 26 11:07:44 crc kubenswrapper[5003]: I0126 11:07:44.551085 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/infra-operator-controller-manager-fc4cd6d84-shrbc" podUID="0460124e-d3ec-4069-ad36-914a93ef06cb" containerName="manager" containerID="cri-o://996eca42ffab49405784bd7f8caa87abf6ebc6b281511933047e4ea9967e63dc" gracePeriod=10 Jan 26 11:07:44 crc kubenswrapper[5003]: I0126 11:07:44.769445 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-djwjt"] Jan 26 11:07:44 crc kubenswrapper[5003]: I0126 11:07:44.769677 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/infra-operator-index-djwjt" podUID="9e455f13-32bb-4f60-9624-678d440683ac" containerName="registry-server" containerID="cri-o://d56352fedf69c2a173a4060ec223da40400cf2cd5408103cf09eea5da166fc99" gracePeriod=30 Jan 26 11:07:44 crc kubenswrapper[5003]: I0126 11:07:44.813903 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/ab4cebf9c8e9911cdf6a66ff2b7d90dca88985d852ea4187b325e8f162mxn9j"] Jan 26 11:07:44 crc kubenswrapper[5003]: I0126 11:07:44.820234 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/ab4cebf9c8e9911cdf6a66ff2b7d90dca88985d852ea4187b325e8f162mxn9j"] Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.010787 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc780c04-9290-4f09-bcca-8777b3713c86" path="/var/lib/kubelet/pods/fc780c04-9290-4f09-bcca-8777b3713c86/volumes" Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.074332 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-fc4cd6d84-shrbc" Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.166716 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0460124e-d3ec-4069-ad36-914a93ef06cb-webhook-cert\") pod \"0460124e-d3ec-4069-ad36-914a93ef06cb\" (UID: \"0460124e-d3ec-4069-ad36-914a93ef06cb\") " Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.167088 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0460124e-d3ec-4069-ad36-914a93ef06cb-apiservice-cert\") pod \"0460124e-d3ec-4069-ad36-914a93ef06cb\" (UID: \"0460124e-d3ec-4069-ad36-914a93ef06cb\") " Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.167135 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gb67b\" (UniqueName: \"kubernetes.io/projected/0460124e-d3ec-4069-ad36-914a93ef06cb-kube-api-access-gb67b\") pod \"0460124e-d3ec-4069-ad36-914a93ef06cb\" (UID: \"0460124e-d3ec-4069-ad36-914a93ef06cb\") " Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.172583 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0460124e-d3ec-4069-ad36-914a93ef06cb-kube-api-access-gb67b" (OuterVolumeSpecName: "kube-api-access-gb67b") pod "0460124e-d3ec-4069-ad36-914a93ef06cb" (UID: "0460124e-d3ec-4069-ad36-914a93ef06cb"). InnerVolumeSpecName "kube-api-access-gb67b". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.172793 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0460124e-d3ec-4069-ad36-914a93ef06cb-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "0460124e-d3ec-4069-ad36-914a93ef06cb" (UID: "0460124e-d3ec-4069-ad36-914a93ef06cb"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.173524 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0460124e-d3ec-4069-ad36-914a93ef06cb-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "0460124e-d3ec-4069-ad36-914a93ef06cb" (UID: "0460124e-d3ec-4069-ad36-914a93ef06cb"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.181194 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-djwjt" Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.268993 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s6xsp\" (UniqueName: \"kubernetes.io/projected/9e455f13-32bb-4f60-9624-678d440683ac-kube-api-access-s6xsp\") pod \"9e455f13-32bb-4f60-9624-678d440683ac\" (UID: \"9e455f13-32bb-4f60-9624-678d440683ac\") " Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.269243 5003 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0460124e-d3ec-4069-ad36-914a93ef06cb-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.269257 5003 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0460124e-d3ec-4069-ad36-914a93ef06cb-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.269267 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gb67b\" (UniqueName: \"kubernetes.io/projected/0460124e-d3ec-4069-ad36-914a93ef06cb-kube-api-access-gb67b\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.272043 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e455f13-32bb-4f60-9624-678d440683ac-kube-api-access-s6xsp" (OuterVolumeSpecName: "kube-api-access-s6xsp") pod "9e455f13-32bb-4f60-9624-678d440683ac" (UID: "9e455f13-32bb-4f60-9624-678d440683ac"). InnerVolumeSpecName "kube-api-access-s6xsp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.371059 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s6xsp\" (UniqueName: \"kubernetes.io/projected/9e455f13-32bb-4f60-9624-678d440683ac-kube-api-access-s6xsp\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.538404 5003 generic.go:334] "Generic (PLEG): container finished" podID="9e455f13-32bb-4f60-9624-678d440683ac" containerID="d56352fedf69c2a173a4060ec223da40400cf2cd5408103cf09eea5da166fc99" exitCode=0 Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.538474 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-djwjt" event={"ID":"9e455f13-32bb-4f60-9624-678d440683ac","Type":"ContainerDied","Data":"d56352fedf69c2a173a4060ec223da40400cf2cd5408103cf09eea5da166fc99"} Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.538525 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-djwjt" event={"ID":"9e455f13-32bb-4f60-9624-678d440683ac","Type":"ContainerDied","Data":"b90a03dc72c1189b5000be57ca6aa9edb43390bd4bfa5da19e5f3b1a113f3354"} Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.538541 5003 scope.go:117] "RemoveContainer" containerID="d56352fedf69c2a173a4060ec223da40400cf2cd5408103cf09eea5da166fc99" Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.538545 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-djwjt" Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.540592 5003 generic.go:334] "Generic (PLEG): container finished" podID="0460124e-d3ec-4069-ad36-914a93ef06cb" containerID="996eca42ffab49405784bd7f8caa87abf6ebc6b281511933047e4ea9967e63dc" exitCode=0 Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.540612 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-fc4cd6d84-shrbc" Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.540628 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-fc4cd6d84-shrbc" event={"ID":"0460124e-d3ec-4069-ad36-914a93ef06cb","Type":"ContainerDied","Data":"996eca42ffab49405784bd7f8caa87abf6ebc6b281511933047e4ea9967e63dc"} Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.540737 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-fc4cd6d84-shrbc" event={"ID":"0460124e-d3ec-4069-ad36-914a93ef06cb","Type":"ContainerDied","Data":"94a58289de7c560c787497f43bde652356879ba1befae823514ac284be2ae08b"} Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.562189 5003 scope.go:117] "RemoveContainer" containerID="d56352fedf69c2a173a4060ec223da40400cf2cd5408103cf09eea5da166fc99" Jan 26 11:07:45 crc kubenswrapper[5003]: E0126 11:07:45.562898 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d56352fedf69c2a173a4060ec223da40400cf2cd5408103cf09eea5da166fc99\": container with ID starting with d56352fedf69c2a173a4060ec223da40400cf2cd5408103cf09eea5da166fc99 not found: ID does not exist" containerID="d56352fedf69c2a173a4060ec223da40400cf2cd5408103cf09eea5da166fc99" Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.562937 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d56352fedf69c2a173a4060ec223da40400cf2cd5408103cf09eea5da166fc99"} err="failed to get container status \"d56352fedf69c2a173a4060ec223da40400cf2cd5408103cf09eea5da166fc99\": rpc error: code = NotFound desc = could not find container \"d56352fedf69c2a173a4060ec223da40400cf2cd5408103cf09eea5da166fc99\": container with ID starting with d56352fedf69c2a173a4060ec223da40400cf2cd5408103cf09eea5da166fc99 not found: ID does not exist" Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.562956 5003 scope.go:117] "RemoveContainer" containerID="996eca42ffab49405784bd7f8caa87abf6ebc6b281511933047e4ea9967e63dc" Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.579014 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-djwjt"] Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.583747 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/infra-operator-index-djwjt"] Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.587005 5003 scope.go:117] "RemoveContainer" containerID="996eca42ffab49405784bd7f8caa87abf6ebc6b281511933047e4ea9967e63dc" Jan 26 11:07:45 crc kubenswrapper[5003]: E0126 11:07:45.587566 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"996eca42ffab49405784bd7f8caa87abf6ebc6b281511933047e4ea9967e63dc\": container with ID starting with 996eca42ffab49405784bd7f8caa87abf6ebc6b281511933047e4ea9967e63dc not found: ID does 
not exist" containerID="996eca42ffab49405784bd7f8caa87abf6ebc6b281511933047e4ea9967e63dc" Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.587596 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"996eca42ffab49405784bd7f8caa87abf6ebc6b281511933047e4ea9967e63dc"} err="failed to get container status \"996eca42ffab49405784bd7f8caa87abf6ebc6b281511933047e4ea9967e63dc\": rpc error: code = NotFound desc = could not find container \"996eca42ffab49405784bd7f8caa87abf6ebc6b281511933047e4ea9967e63dc\": container with ID starting with 996eca42ffab49405784bd7f8caa87abf6ebc6b281511933047e4ea9967e63dc not found: ID does not exist" Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.591796 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-controller-manager-fc4cd6d84-shrbc"] Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.596694 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/infra-operator-controller-manager-fc4cd6d84-shrbc"] Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.854317 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-78fc8bbd6f-lbg4b"] Jan 26 11:07:45 crc kubenswrapper[5003]: I0126 11:07:45.854806 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/mariadb-operator-controller-manager-78fc8bbd6f-lbg4b" podUID="65018825-022b-422d-9c2a-3f22f2619d1a" containerName="manager" containerID="cri-o://c94056cc8ca6f4a988902a7e1fd8fbad3b798cdf6cf58f3fa68570819eebd0e4" gracePeriod=10 Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.086782 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-7hhgf"] Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.087044 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/mariadb-operator-index-7hhgf" podUID="6c588250-7629-448a-9007-f31db35eab93" containerName="registry-server" containerID="cri-o://aa65edb996335d2a6322dd7db9c56266d1d7085cf85b87f8cd36e3f339236c90" gracePeriod=30 Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.110914 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/2e919283e07611ac611f6219872c365c8d6dbd6aec009cc72be1adebbfrqq9l"] Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.116365 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/2e919283e07611ac611f6219872c365c8d6dbd6aec009cc72be1adebbfrqq9l"] Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.297086 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-78fc8bbd6f-lbg4b" Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.383092 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/65018825-022b-422d-9c2a-3f22f2619d1a-apiservice-cert\") pod \"65018825-022b-422d-9c2a-3f22f2619d1a\" (UID: \"65018825-022b-422d-9c2a-3f22f2619d1a\") " Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.383303 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k27tm\" (UniqueName: \"kubernetes.io/projected/65018825-022b-422d-9c2a-3f22f2619d1a-kube-api-access-k27tm\") pod \"65018825-022b-422d-9c2a-3f22f2619d1a\" (UID: \"65018825-022b-422d-9c2a-3f22f2619d1a\") " Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.383338 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/65018825-022b-422d-9c2a-3f22f2619d1a-webhook-cert\") pod \"65018825-022b-422d-9c2a-3f22f2619d1a\" (UID: \"65018825-022b-422d-9c2a-3f22f2619d1a\") " Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.388383 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65018825-022b-422d-9c2a-3f22f2619d1a-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "65018825-022b-422d-9c2a-3f22f2619d1a" (UID: "65018825-022b-422d-9c2a-3f22f2619d1a"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.388608 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65018825-022b-422d-9c2a-3f22f2619d1a-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "65018825-022b-422d-9c2a-3f22f2619d1a" (UID: "65018825-022b-422d-9c2a-3f22f2619d1a"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.389033 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65018825-022b-422d-9c2a-3f22f2619d1a-kube-api-access-k27tm" (OuterVolumeSpecName: "kube-api-access-k27tm") pod "65018825-022b-422d-9c2a-3f22f2619d1a" (UID: "65018825-022b-422d-9c2a-3f22f2619d1a"). InnerVolumeSpecName "kube-api-access-k27tm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.484452 5003 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/65018825-022b-422d-9c2a-3f22f2619d1a-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.484477 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k27tm\" (UniqueName: \"kubernetes.io/projected/65018825-022b-422d-9c2a-3f22f2619d1a-kube-api-access-k27tm\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.484489 5003 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/65018825-022b-422d-9c2a-3f22f2619d1a-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.487914 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-7hhgf" Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.550346 5003 generic.go:334] "Generic (PLEG): container finished" podID="65018825-022b-422d-9c2a-3f22f2619d1a" containerID="c94056cc8ca6f4a988902a7e1fd8fbad3b798cdf6cf58f3fa68570819eebd0e4" exitCode=0 Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.550415 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-78fc8bbd6f-lbg4b" Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.550438 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-78fc8bbd6f-lbg4b" event={"ID":"65018825-022b-422d-9c2a-3f22f2619d1a","Type":"ContainerDied","Data":"c94056cc8ca6f4a988902a7e1fd8fbad3b798cdf6cf58f3fa68570819eebd0e4"} Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.550475 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-78fc8bbd6f-lbg4b" event={"ID":"65018825-022b-422d-9c2a-3f22f2619d1a","Type":"ContainerDied","Data":"de2d284a1b855d15a43972864ae136e367fd73d8d80f5403d948a50ae9910db2"} Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.550498 5003 scope.go:117] "RemoveContainer" containerID="c94056cc8ca6f4a988902a7e1fd8fbad3b798cdf6cf58f3fa68570819eebd0e4" Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.556542 5003 generic.go:334] "Generic (PLEG): container finished" podID="6c588250-7629-448a-9007-f31db35eab93" containerID="aa65edb996335d2a6322dd7db9c56266d1d7085cf85b87f8cd36e3f339236c90" exitCode=0 Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.556596 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-7hhgf" Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.556601 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-7hhgf" event={"ID":"6c588250-7629-448a-9007-f31db35eab93","Type":"ContainerDied","Data":"aa65edb996335d2a6322dd7db9c56266d1d7085cf85b87f8cd36e3f339236c90"} Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.556657 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-7hhgf" event={"ID":"6c588250-7629-448a-9007-f31db35eab93","Type":"ContainerDied","Data":"8927abc2579628f6a3ba9d80c9da069b57f323c97bb96722ff929492cd78d177"} Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.578754 5003 scope.go:117] "RemoveContainer" containerID="c94056cc8ca6f4a988902a7e1fd8fbad3b798cdf6cf58f3fa68570819eebd0e4" Jan 26 11:07:46 crc kubenswrapper[5003]: E0126 11:07:46.590611 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c94056cc8ca6f4a988902a7e1fd8fbad3b798cdf6cf58f3fa68570819eebd0e4\": container with ID starting with c94056cc8ca6f4a988902a7e1fd8fbad3b798cdf6cf58f3fa68570819eebd0e4 not found: ID does not exist" containerID="c94056cc8ca6f4a988902a7e1fd8fbad3b798cdf6cf58f3fa68570819eebd0e4" Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.590672 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c94056cc8ca6f4a988902a7e1fd8fbad3b798cdf6cf58f3fa68570819eebd0e4"} err="failed to get container status \"c94056cc8ca6f4a988902a7e1fd8fbad3b798cdf6cf58f3fa68570819eebd0e4\": rpc error: code = NotFound desc = could not find container \"c94056cc8ca6f4a988902a7e1fd8fbad3b798cdf6cf58f3fa68570819eebd0e4\": container with ID starting with c94056cc8ca6f4a988902a7e1fd8fbad3b798cdf6cf58f3fa68570819eebd0e4 not found: ID does not exist" Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.590698 5003 scope.go:117] "RemoveContainer" containerID="aa65edb996335d2a6322dd7db9c56266d1d7085cf85b87f8cd36e3f339236c90" Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.591365 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-chz4n\" (UniqueName: \"kubernetes.io/projected/6c588250-7629-448a-9007-f31db35eab93-kube-api-access-chz4n\") pod \"6c588250-7629-448a-9007-f31db35eab93\" (UID: \"6c588250-7629-448a-9007-f31db35eab93\") " Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.592888 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-78fc8bbd6f-lbg4b"] Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.594765 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c588250-7629-448a-9007-f31db35eab93-kube-api-access-chz4n" (OuterVolumeSpecName: "kube-api-access-chz4n") pod "6c588250-7629-448a-9007-f31db35eab93" (UID: "6c588250-7629-448a-9007-f31db35eab93"). InnerVolumeSpecName "kube-api-access-chz4n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.600813 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-78fc8bbd6f-lbg4b"] Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.614616 5003 scope.go:117] "RemoveContainer" containerID="aa65edb996335d2a6322dd7db9c56266d1d7085cf85b87f8cd36e3f339236c90" Jan 26 11:07:46 crc kubenswrapper[5003]: E0126 11:07:46.615149 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa65edb996335d2a6322dd7db9c56266d1d7085cf85b87f8cd36e3f339236c90\": container with ID starting with aa65edb996335d2a6322dd7db9c56266d1d7085cf85b87f8cd36e3f339236c90 not found: ID does not exist" containerID="aa65edb996335d2a6322dd7db9c56266d1d7085cf85b87f8cd36e3f339236c90" Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.615198 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa65edb996335d2a6322dd7db9c56266d1d7085cf85b87f8cd36e3f339236c90"} err="failed to get container status \"aa65edb996335d2a6322dd7db9c56266d1d7085cf85b87f8cd36e3f339236c90\": rpc error: code = NotFound desc = could not find container \"aa65edb996335d2a6322dd7db9c56266d1d7085cf85b87f8cd36e3f339236c90\": container with ID starting with aa65edb996335d2a6322dd7db9c56266d1d7085cf85b87f8cd36e3f339236c90 not found: ID does not exist" Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.692881 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-chz4n\" (UniqueName: \"kubernetes.io/projected/6c588250-7629-448a-9007-f31db35eab93-kube-api-access-chz4n\") on node \"crc\" DevicePath \"\"" Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.881799 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-7hhgf"] Jan 26 11:07:46 crc kubenswrapper[5003]: I0126 11:07:46.885228 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/mariadb-operator-index-7hhgf"] Jan 26 11:07:47 crc kubenswrapper[5003]: I0126 11:07:47.010180 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0460124e-d3ec-4069-ad36-914a93ef06cb" path="/var/lib/kubelet/pods/0460124e-d3ec-4069-ad36-914a93ef06cb/volumes" Jan 26 11:07:47 crc kubenswrapper[5003]: I0126 11:07:47.010939 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35801d0d-b495-49ae-85b2-3f3bccd0241c" path="/var/lib/kubelet/pods/35801d0d-b495-49ae-85b2-3f3bccd0241c/volumes" Jan 26 11:07:47 crc kubenswrapper[5003]: I0126 11:07:47.012063 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65018825-022b-422d-9c2a-3f22f2619d1a" path="/var/lib/kubelet/pods/65018825-022b-422d-9c2a-3f22f2619d1a/volumes" Jan 26 11:07:47 crc kubenswrapper[5003]: I0126 11:07:47.013600 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c588250-7629-448a-9007-f31db35eab93" path="/var/lib/kubelet/pods/6c588250-7629-448a-9007-f31db35eab93/volumes" Jan 26 11:07:47 crc kubenswrapper[5003]: I0126 11:07:47.014336 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e455f13-32bb-4f60-9624-678d440683ac" path="/var/lib/kubelet/pods/9e455f13-32bb-4f60-9624-678d440683ac/volumes" Jan 26 11:07:59 crc kubenswrapper[5003]: E0126 11:07:59.081684 5003 configmap.go:193] Couldn't get configMap swift-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not 
Jan 26 11:07:59 crc kubenswrapper[5003]: E0126 11:07:59.082222 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/afd987da-215e-4edc-940c-1529b3531bf1-operator-scripts podName:afd987da-215e-4edc-940c-1529b3531bf1 nodeName:}" failed. No retries permitted until 2026-01-26 11:08:31.082200561 +0000 UTC m=+1526.623426122 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/afd987da-215e-4edc-940c-1529b3531bf1-operator-scripts") pod "keystone2760-account-delete-52t5q" (UID: "afd987da-215e-4edc-940c-1529b3531bf1") : configmap "openstack-scripts" not found
Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.036416 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-dczvs/must-gather-q8zwg"]
Jan 26 11:08:00 crc kubenswrapper[5003]: E0126 11:08:00.036712 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d8bd836-ef6c-425f-b570-69c53560c715" containerName="mysql-bootstrap"
Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.036728 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d8bd836-ef6c-425f-b570-69c53560c715" containerName="mysql-bootstrap"
Jan 26 11:08:00 crc kubenswrapper[5003]: E0126 11:08:00.036739 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd511f4f-c18a-4f7c-8fb9-1d760a3039ac" containerName="galera"
Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.036746 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd511f4f-c18a-4f7c-8fb9-1d760a3039ac" containerName="galera"
Jan 26 11:08:00 crc kubenswrapper[5003]: E0126 11:08:00.036764 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7210ca64-60f9-4e11-bd2c-6e4905b0b948" containerName="memcached"
Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.036772 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="7210ca64-60f9-4e11-bd2c-6e4905b0b948" containerName="memcached"
Jan 26 11:08:00 crc kubenswrapper[5003]: E0126 11:08:00.036784 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0460124e-d3ec-4069-ad36-914a93ef06cb" containerName="manager"
Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.036792 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="0460124e-d3ec-4069-ad36-914a93ef06cb" containerName="manager"
Jan 26 11:08:00 crc kubenswrapper[5003]: E0126 11:08:00.036801 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e003e40c-8a7e-434a-b20a-1ac895a8d682" containerName="mariadb-account-delete"
Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.036808 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e003e40c-8a7e-434a-b20a-1ac895a8d682" containerName="mariadb-account-delete"
Jan 26 11:08:00 crc kubenswrapper[5003]: E0126 11:08:00.036819 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2" containerName="setup-container"
Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.036826 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2" containerName="setup-container"
Jan 26 11:08:00 crc kubenswrapper[5003]: E0126 11:08:00.036835 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d8bd836-ef6c-425f-b570-69c53560c715" containerName="galera"
Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.036843 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d8bd836-ef6c-425f-b570-69c53560c715" containerName="galera"
Jan 26 11:08:00 crc kubenswrapper[5003]: E0126 11:08:00.036857 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c" containerName="keystone-api"
Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.036865 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c" containerName="keystone-api"
Jan 26 11:08:00 crc kubenswrapper[5003]: E0126 11:08:00.036879 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fedfbd6-7026-49ff-b4cd-bc52a093e02a" containerName="manager"
Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.036886 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fedfbd6-7026-49ff-b4cd-bc52a093e02a" containerName="manager"
Jan 26 11:08:00 crc kubenswrapper[5003]: E0126 11:08:00.036896 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ca7900c-1191-4d34-a44f-29fd6d510d90" containerName="barbican-keystone-listener"
Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.036903 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ca7900c-1191-4d34-a44f-29fd6d510d90" containerName="barbican-keystone-listener"
Jan 26 11:08:00 crc kubenswrapper[5003]: E0126 11:08:00.036913 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8d40265-176c-4b0c-add2-7d7ec6c76f50" containerName="registry-server"
Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.036921 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8d40265-176c-4b0c-add2-7d7ec6c76f50" containerName="registry-server"
Jan 26 11:08:00 crc kubenswrapper[5003]: E0126 11:08:00.036934 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e9f91e0-3cda-46fd-9034-08b41bf5f546" containerName="barbican-api-log"
Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.036941 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e9f91e0-3cda-46fd-9034-08b41bf5f546" containerName="barbican-api-log"
Jan 26 11:08:00 crc kubenswrapper[5003]: E0126 11:08:00.036951 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="261ebec9-25ad-4434-bf06-3feeee0f0eff" containerName="mysql-bootstrap"
Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.036958 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="261ebec9-25ad-4434-bf06-3feeee0f0eff" containerName="mysql-bootstrap"
Jan 26 11:08:00 crc kubenswrapper[5003]: E0126 11:08:00.036968 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd511f4f-c18a-4f7c-8fb9-1d760a3039ac" containerName="mysql-bootstrap"
Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.036975 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd511f4f-c18a-4f7c-8fb9-1d760a3039ac" containerName="mysql-bootstrap"
Jan 26 11:08:00 crc kubenswrapper[5003]: E0126 11:08:00.036988 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5479a1b5-f4d1-4ff7-a602-c1cd068f3ee7" containerName="operator"
Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.036997 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="5479a1b5-f4d1-4ff7-a602-c1cd068f3ee7" containerName="operator"
Jan 26 11:08:00 crc kubenswrapper[5003]: E0126 11:08:00.037008 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c588250-7629-448a-9007-f31db35eab93" containerName="registry-server"
Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037016 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c588250-7629-448a-9007-f31db35eab93" containerName="registry-server"
containerName="registry-server" Jan 26 11:08:00 crc kubenswrapper[5003]: E0126 11:08:00.037025 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6b37c7e-571e-4047-99a1-149f0956e9a8" containerName="registry-server" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037033 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6b37c7e-571e-4047-99a1-149f0956e9a8" containerName="registry-server" Jan 26 11:08:00 crc kubenswrapper[5003]: E0126 11:08:00.037046 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9587e58d-66ff-4a24-8373-58c7d6946575" containerName="manager" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037054 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="9587e58d-66ff-4a24-8373-58c7d6946575" containerName="manager" Jan 26 11:08:00 crc kubenswrapper[5003]: E0126 11:08:00.037069 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68526bd8-8a1a-478b-8b12-3333bcaf29c8" containerName="registry-server" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037077 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="68526bd8-8a1a-478b-8b12-3333bcaf29c8" containerName="registry-server" Jan 26 11:08:00 crc kubenswrapper[5003]: E0126 11:08:00.037088 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e455f13-32bb-4f60-9624-678d440683ac" containerName="registry-server" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037096 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e455f13-32bb-4f60-9624-678d440683ac" containerName="registry-server" Jan 26 11:08:00 crc kubenswrapper[5003]: E0126 11:08:00.037104 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ca7900c-1191-4d34-a44f-29fd6d510d90" containerName="barbican-keystone-listener-log" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037111 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ca7900c-1191-4d34-a44f-29fd6d510d90" containerName="barbican-keystone-listener-log" Jan 26 11:08:00 crc kubenswrapper[5003]: E0126 11:08:00.037123 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65018825-022b-422d-9c2a-3f22f2619d1a" containerName="manager" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037131 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="65018825-022b-422d-9c2a-3f22f2619d1a" containerName="manager" Jan 26 11:08:00 crc kubenswrapper[5003]: E0126 11:08:00.037142 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e38fee1-5f72-4d2e-9db9-64bd94887318" containerName="registry-server" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037151 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e38fee1-5f72-4d2e-9db9-64bd94887318" containerName="registry-server" Jan 26 11:08:00 crc kubenswrapper[5003]: E0126 11:08:00.037160 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e9f91e0-3cda-46fd-9034-08b41bf5f546" containerName="barbican-api" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037170 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e9f91e0-3cda-46fd-9034-08b41bf5f546" containerName="barbican-api" Jan 26 11:08:00 crc kubenswrapper[5003]: E0126 11:08:00.037182 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0aac059e-645d-4967-838a-e51e27aad2ac" containerName="barbican-worker" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037189 5003 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="0aac059e-645d-4967-838a-e51e27aad2ac" containerName="barbican-worker" Jan 26 11:08:00 crc kubenswrapper[5003]: E0126 11:08:00.037222 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcce3480-cea7-4075-80c8-60c85f8acdab" containerName="manager" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037230 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcce3480-cea7-4075-80c8-60c85f8acdab" containerName="manager" Jan 26 11:08:00 crc kubenswrapper[5003]: E0126 11:08:00.037242 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0aac059e-645d-4967-838a-e51e27aad2ac" containerName="barbican-worker-log" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037251 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="0aac059e-645d-4967-838a-e51e27aad2ac" containerName="barbican-worker-log" Jan 26 11:08:00 crc kubenswrapper[5003]: E0126 11:08:00.037261 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2" containerName="rabbitmq" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037268 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2" containerName="rabbitmq" Jan 26 11:08:00 crc kubenswrapper[5003]: E0126 11:08:00.037298 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="261ebec9-25ad-4434-bf06-3feeee0f0eff" containerName="galera" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037306 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="261ebec9-25ad-4434-bf06-3feeee0f0eff" containerName="galera" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037428 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e38fee1-5f72-4d2e-9db9-64bd94887318" containerName="registry-server" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037441 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e455f13-32bb-4f60-9624-678d440683ac" containerName="registry-server" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037452 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd511f4f-c18a-4f7c-8fb9-1d760a3039ac" containerName="galera" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037462 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ca7900c-1191-4d34-a44f-29fd6d510d90" containerName="barbican-keystone-listener" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037472 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="9587e58d-66ff-4a24-8373-58c7d6946575" containerName="manager" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037481 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="68526bd8-8a1a-478b-8b12-3333bcaf29c8" containerName="registry-server" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037492 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e9f91e0-3cda-46fd-9034-08b41bf5f546" containerName="barbican-api-log" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037502 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e003e40c-8a7e-434a-b20a-1ac895a8d682" containerName="mariadb-account-delete" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037512 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8d40265-176c-4b0c-add2-7d7ec6c76f50" containerName="registry-server" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037522 5003 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="1fedfbd6-7026-49ff-b4cd-bc52a093e02a" containerName="manager" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037530 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6b37c7e-571e-4047-99a1-149f0956e9a8" containerName="registry-server" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037540 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="65018825-022b-422d-9c2a-3f22f2619d1a" containerName="manager" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037549 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="7210ca64-60f9-4e11-bd2c-6e4905b0b948" containerName="memcached" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037560 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d8bd836-ef6c-425f-b570-69c53560c715" containerName="galera" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037568 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ca7900c-1191-4d34-a44f-29fd6d510d90" containerName="barbican-keystone-listener-log" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037579 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="0460124e-d3ec-4069-ad36-914a93ef06cb" containerName="manager" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037588 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c588250-7629-448a-9007-f31db35eab93" containerName="registry-server" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037601 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="0aac059e-645d-4967-838a-e51e27aad2ac" containerName="barbican-worker" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037610 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="261ebec9-25ad-4434-bf06-3feeee0f0eff" containerName="galera" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037620 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="5479a1b5-f4d1-4ff7-a602-c1cd068f3ee7" containerName="operator" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037630 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e9f91e0-3cda-46fd-9034-08b41bf5f546" containerName="barbican-api" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037640 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcce3480-cea7-4075-80c8-60c85f8acdab" containerName="manager" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037649 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="0aac059e-645d-4967-838a-e51e27aad2ac" containerName="barbican-worker-log" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037659 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="0bd8ac9c-c534-47b0-89d0-d0d1db1f81a2" containerName="rabbitmq" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.037668 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="93f9e1ac-6a26-4ec2-aea0-53a9ea07fa9c" containerName="keystone-api" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.038390 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-dczvs/must-gather-q8zwg" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.040704 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-dczvs"/"default-dockercfg-qvxc9" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.040892 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-dczvs"/"kube-root-ca.crt" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.053344 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-dczvs"/"openshift-service-ca.crt" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.067439 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-dczvs/must-gather-q8zwg"] Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.213512 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/af0ae9dc-aef2-45db-9f30-87494fe64171-must-gather-output\") pod \"must-gather-q8zwg\" (UID: \"af0ae9dc-aef2-45db-9f30-87494fe64171\") " pod="openshift-must-gather-dczvs/must-gather-q8zwg" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.213641 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h65s2\" (UniqueName: \"kubernetes.io/projected/af0ae9dc-aef2-45db-9f30-87494fe64171-kube-api-access-h65s2\") pod \"must-gather-q8zwg\" (UID: \"af0ae9dc-aef2-45db-9f30-87494fe64171\") " pod="openshift-must-gather-dczvs/must-gather-q8zwg" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.315234 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/af0ae9dc-aef2-45db-9f30-87494fe64171-must-gather-output\") pod \"must-gather-q8zwg\" (UID: \"af0ae9dc-aef2-45db-9f30-87494fe64171\") " pod="openshift-must-gather-dczvs/must-gather-q8zwg" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.315326 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h65s2\" (UniqueName: \"kubernetes.io/projected/af0ae9dc-aef2-45db-9f30-87494fe64171-kube-api-access-h65s2\") pod \"must-gather-q8zwg\" (UID: \"af0ae9dc-aef2-45db-9f30-87494fe64171\") " pod="openshift-must-gather-dczvs/must-gather-q8zwg" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.315740 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/af0ae9dc-aef2-45db-9f30-87494fe64171-must-gather-output\") pod \"must-gather-q8zwg\" (UID: \"af0ae9dc-aef2-45db-9f30-87494fe64171\") " pod="openshift-must-gather-dczvs/must-gather-q8zwg" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.331990 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h65s2\" (UniqueName: \"kubernetes.io/projected/af0ae9dc-aef2-45db-9f30-87494fe64171-kube-api-access-h65s2\") pod \"must-gather-q8zwg\" (UID: \"af0ae9dc-aef2-45db-9f30-87494fe64171\") " pod="openshift-must-gather-dczvs/must-gather-q8zwg" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.355089 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-dczvs/must-gather-q8zwg" Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.754738 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-dczvs/must-gather-q8zwg"] Jan 26 11:08:00 crc kubenswrapper[5003]: W0126 11:08:00.778116 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaf0ae9dc_aef2_45db_9f30_87494fe64171.slice/crio-f538cc39784ddfa323ea31a5ef2f03e26591f61e0a7f9009621941e5c7b063b0 WatchSource:0}: Error finding container f538cc39784ddfa323ea31a5ef2f03e26591f61e0a7f9009621941e5c7b063b0: Status 404 returned error can't find the container with id f538cc39784ddfa323ea31a5ef2f03e26591f61e0a7f9009621941e5c7b063b0 Jan 26 11:08:00 crc kubenswrapper[5003]: I0126 11:08:00.782607 5003 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 26 11:08:01 crc kubenswrapper[5003]: I0126 11:08:01.683686 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dczvs/must-gather-q8zwg" event={"ID":"af0ae9dc-aef2-45db-9f30-87494fe64171","Type":"ContainerStarted","Data":"f538cc39784ddfa323ea31a5ef2f03e26591f61e0a7f9009621941e5c7b063b0"} Jan 26 11:08:01 crc kubenswrapper[5003]: I0126 11:08:01.688353 5003 generic.go:334] "Generic (PLEG): container finished" podID="afd987da-215e-4edc-940c-1529b3531bf1" containerID="d63d633fd5857b51aa6161ab6b3094bda0ede0e4f9ede3bcf3363f7df4090c1e" exitCode=137 Jan 26 11:08:01 crc kubenswrapper[5003]: I0126 11:08:01.688424 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone2760-account-delete-52t5q" event={"ID":"afd987da-215e-4edc-940c-1529b3531bf1","Type":"ContainerDied","Data":"d63d633fd5857b51aa6161ab6b3094bda0ede0e4f9ede3bcf3363f7df4090c1e"} Jan 26 11:08:01 crc kubenswrapper[5003]: I0126 11:08:01.915481 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone2760-account-delete-52t5q" Jan 26 11:08:02 crc kubenswrapper[5003]: I0126 11:08:02.046104 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/afd987da-215e-4edc-940c-1529b3531bf1-operator-scripts\") pod \"afd987da-215e-4edc-940c-1529b3531bf1\" (UID: \"afd987da-215e-4edc-940c-1529b3531bf1\") " Jan 26 11:08:02 crc kubenswrapper[5003]: I0126 11:08:02.046231 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rk5zv\" (UniqueName: \"kubernetes.io/projected/afd987da-215e-4edc-940c-1529b3531bf1-kube-api-access-rk5zv\") pod \"afd987da-215e-4edc-940c-1529b3531bf1\" (UID: \"afd987da-215e-4edc-940c-1529b3531bf1\") " Jan 26 11:08:02 crc kubenswrapper[5003]: I0126 11:08:02.047371 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/afd987da-215e-4edc-940c-1529b3531bf1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "afd987da-215e-4edc-940c-1529b3531bf1" (UID: "afd987da-215e-4edc-940c-1529b3531bf1"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 11:08:02 crc kubenswrapper[5003]: I0126 11:08:02.076315 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afd987da-215e-4edc-940c-1529b3531bf1-kube-api-access-rk5zv" (OuterVolumeSpecName: "kube-api-access-rk5zv") pod "afd987da-215e-4edc-940c-1529b3531bf1" (UID: "afd987da-215e-4edc-940c-1529b3531bf1"). InnerVolumeSpecName "kube-api-access-rk5zv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:08:02 crc kubenswrapper[5003]: I0126 11:08:02.148637 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/afd987da-215e-4edc-940c-1529b3531bf1-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 26 11:08:02 crc kubenswrapper[5003]: I0126 11:08:02.148669 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rk5zv\" (UniqueName: \"kubernetes.io/projected/afd987da-215e-4edc-940c-1529b3531bf1-kube-api-access-rk5zv\") on node \"crc\" DevicePath \"\"" Jan 26 11:08:02 crc kubenswrapper[5003]: I0126 11:08:02.700056 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone2760-account-delete-52t5q" event={"ID":"afd987da-215e-4edc-940c-1529b3531bf1","Type":"ContainerDied","Data":"b146ae51b1a3006f16f8a942ad82d449e9e3444f44d16d727dd0da9b25beea9b"} Jan 26 11:08:02 crc kubenswrapper[5003]: I0126 11:08:02.700114 5003 scope.go:117] "RemoveContainer" containerID="d63d633fd5857b51aa6161ab6b3094bda0ede0e4f9ede3bcf3363f7df4090c1e" Jan 26 11:08:02 crc kubenswrapper[5003]: I0126 11:08:02.700260 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone2760-account-delete-52t5q" Jan 26 11:08:02 crc kubenswrapper[5003]: I0126 11:08:02.726997 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/keystone2760-account-delete-52t5q"] Jan 26 11:08:02 crc kubenswrapper[5003]: I0126 11:08:02.730234 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/keystone2760-account-delete-52t5q"] Jan 26 11:08:03 crc kubenswrapper[5003]: I0126 11:08:03.010038 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="afd987da-215e-4edc-940c-1529b3531bf1" path="/var/lib/kubelet/pods/afd987da-215e-4edc-940c-1529b3531bf1/volumes" Jan 26 11:08:05 crc kubenswrapper[5003]: I0126 11:08:05.933066 5003 scope.go:117] "RemoveContainer" containerID="6f3f3e65776fe839fbe7aee66407550a1bf403f49a368da1a168ddec2611e879" Jan 26 11:08:06 crc kubenswrapper[5003]: I0126 11:08:06.369209 5003 scope.go:117] "RemoveContainer" containerID="e2d12eed1bca9a1a4a3763846200ba27d0f740f24867c893379ced65f6063f75" Jan 26 11:08:06 crc kubenswrapper[5003]: I0126 11:08:06.439456 5003 scope.go:117] "RemoveContainer" containerID="3de834bcaa9728f6de3d5a7c498b95dc2eca6b23f753d5e240d7512bba4a6348" Jan 26 11:08:06 crc kubenswrapper[5003]: I0126 11:08:06.464076 5003 scope.go:117] "RemoveContainer" containerID="912adaa9bef73e5f1d97033eb93b798efae215bd8772f43f103e39cf7aad8388" Jan 26 11:08:06 crc kubenswrapper[5003]: I0126 11:08:06.490717 5003 scope.go:117] "RemoveContainer" containerID="2fc7a4d553254bcfd70a6603c24123f5313042a2a942cd5c7ec6a56de544854a" Jan 26 11:08:06 crc kubenswrapper[5003]: I0126 11:08:06.526566 5003 scope.go:117] "RemoveContainer" containerID="863cbbb42dda1f2f274f0f2c3452a7cb31cc6e098ed381e878d0200619ba1b04" Jan 26 11:08:06 crc kubenswrapper[5003]: I0126 11:08:06.545436 5003 scope.go:117] 
"RemoveContainer" containerID="1bc756d14e4ef278dacef0892b46f5880a308907e1c2740d1ada43cb814c55b2" Jan 26 11:08:06 crc kubenswrapper[5003]: I0126 11:08:06.570998 5003 scope.go:117] "RemoveContainer" containerID="c28dd943aa6e48c92d943b57357db4d7a757ee3496e80c6f8afc1826f89555ac" Jan 26 11:08:06 crc kubenswrapper[5003]: I0126 11:08:06.594945 5003 scope.go:117] "RemoveContainer" containerID="aea8ef2f11deb741e9e98d6b61f2796f1a8a03a5993b75f8332894748caf5489" Jan 26 11:08:06 crc kubenswrapper[5003]: I0126 11:08:06.613334 5003 scope.go:117] "RemoveContainer" containerID="041eae90a52c3bb8cf61e357cb7cfc9eae16a6d670ec563ffd721323a623a543" Jan 26 11:08:06 crc kubenswrapper[5003]: I0126 11:08:06.631594 5003 scope.go:117] "RemoveContainer" containerID="b96573f3c47c517d1cfd1917cbda877c77d172f10174f8f67d024f81a4025c19" Jan 26 11:08:06 crc kubenswrapper[5003]: I0126 11:08:06.647916 5003 scope.go:117] "RemoveContainer" containerID="f60501dbdbcd028f28f0130101ec8e3936b9f30a713747db3a54ffd6c99af577" Jan 26 11:08:06 crc kubenswrapper[5003]: I0126 11:08:06.683458 5003 scope.go:117] "RemoveContainer" containerID="f12709316bf926a0e401de08ced7b85510db278f932efac7e5ba75658a9a6657" Jan 26 11:08:06 crc kubenswrapper[5003]: I0126 11:08:06.708086 5003 scope.go:117] "RemoveContainer" containerID="96b5fa51591d730dbcb0f4e2b6c193455cdaf0094140fe5682f912c818aa1316" Jan 26 11:08:06 crc kubenswrapper[5003]: I0126 11:08:06.745835 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dczvs/must-gather-q8zwg" event={"ID":"af0ae9dc-aef2-45db-9f30-87494fe64171","Type":"ContainerStarted","Data":"8cdcf867a29fb87d2e116e284bcbe874719fe3a4f1a4e64bfeb40d75eb072071"} Jan 26 11:08:06 crc kubenswrapper[5003]: I0126 11:08:06.750926 5003 scope.go:117] "RemoveContainer" containerID="5cdaea9b636f11d0fb48e590deae50e2c4e15c0c8f5d4c656019ede0b2b44a63" Jan 26 11:08:06 crc kubenswrapper[5003]: I0126 11:08:06.775102 5003 scope.go:117] "RemoveContainer" containerID="94566121d3406f44f1ac5513fe57f3b41239f4f87b36ac20435b288ddc785e80" Jan 26 11:08:06 crc kubenswrapper[5003]: I0126 11:08:06.800365 5003 scope.go:117] "RemoveContainer" containerID="d0ee0f1ae0834b8c92fa8008b131bef63fb72cec700023820e98184dec9e6aad" Jan 26 11:08:06 crc kubenswrapper[5003]: I0126 11:08:06.837604 5003 scope.go:117] "RemoveContainer" containerID="ef4fc9a7abcd51e2ee632b9e5ac8d76aa4c55c2b913742a834636652e4441444" Jan 26 11:08:06 crc kubenswrapper[5003]: I0126 11:08:06.853482 5003 scope.go:117] "RemoveContainer" containerID="5b84314763903eda18ff9726b3817ee45dc85ecaf53a0756d21a03cb7724fd72" Jan 26 11:08:06 crc kubenswrapper[5003]: I0126 11:08:06.869920 5003 scope.go:117] "RemoveContainer" containerID="3f8e0fc04b3b211b98dd54ea444fd48d86d4fc6eae8d898598472e94ef2bb5ac" Jan 26 11:08:06 crc kubenswrapper[5003]: I0126 11:08:06.888043 5003 scope.go:117] "RemoveContainer" containerID="cb64f285d792c261060fd6ddf7d2f6c8d187e1c4445c755d57bb1da01ddfc111" Jan 26 11:08:06 crc kubenswrapper[5003]: I0126 11:08:06.906951 5003 scope.go:117] "RemoveContainer" containerID="6746cd0d8a9a4059c2ab789eb59c874a5a5a1fee7fe895034c3fe78272b86bad" Jan 26 11:08:06 crc kubenswrapper[5003]: I0126 11:08:06.922197 5003 scope.go:117] "RemoveContainer" containerID="3ff0af062e7fe30a5bd7967c45880a0510d64035e54c03f0f47f34ec58a93842" Jan 26 11:08:06 crc kubenswrapper[5003]: I0126 11:08:06.942244 5003 scope.go:117] "RemoveContainer" containerID="e04173a0052a85c4ef9a039ded556917dc49db18e7a7e5e7f1564993575fbb69" Jan 26 11:08:06 crc kubenswrapper[5003]: I0126 11:08:06.959352 5003 scope.go:117] 
"RemoveContainer" containerID="254dae06a9484a8ea1e2c3c9ba2c50aae867cc9ba39ce56dedddbb9aea4102c5" Jan 26 11:08:06 crc kubenswrapper[5003]: I0126 11:08:06.980523 5003 scope.go:117] "RemoveContainer" containerID="b5b8b7f1b7956b2dff3c4ff61fd4ba573bd8ac81f9d059977cc3d8425eb02601" Jan 26 11:08:06 crc kubenswrapper[5003]: I0126 11:08:06.998800 5003 scope.go:117] "RemoveContainer" containerID="ba6491ef7c3ca38b339ce9377796fbbc7e035ffc4e2bac9bf62b5f912f249fb7" Jan 26 11:08:07 crc kubenswrapper[5003]: I0126 11:08:07.014690 5003 scope.go:117] "RemoveContainer" containerID="3728c52b53f4478f4e64571c3cd299ad0d2dbcdc1eed0997b218681fbd3511dd" Jan 26 11:08:07 crc kubenswrapper[5003]: I0126 11:08:07.034264 5003 scope.go:117] "RemoveContainer" containerID="30c98d7026a10ccac11fbf49a5ac20f2cef4ed5e04d7647aa6e79e4d0237ac7a" Jan 26 11:08:07 crc kubenswrapper[5003]: I0126 11:08:07.049307 5003 scope.go:117] "RemoveContainer" containerID="4980f3504eb05cb29e3cdd365f7d46343966d9fd29860a7c25d731856895b254" Jan 26 11:08:07 crc kubenswrapper[5003]: I0126 11:08:07.065663 5003 scope.go:117] "RemoveContainer" containerID="2827e0c3a3aa8528c01134e6d85f5916096ce96047f70264b6cd6bf7110736e3" Jan 26 11:08:07 crc kubenswrapper[5003]: I0126 11:08:07.083631 5003 scope.go:117] "RemoveContainer" containerID="40f68e661a9dd524e00f97a9724bddfe1020d8ccf737c7184461d1bc029d39cd" Jan 26 11:08:07 crc kubenswrapper[5003]: I0126 11:08:07.113735 5003 scope.go:117] "RemoveContainer" containerID="b9c6b3334e92374c6d259b7ddf52a594a345cfb04f8f2c05427f6062bf09785a" Jan 26 11:08:07 crc kubenswrapper[5003]: I0126 11:08:07.130726 5003 scope.go:117] "RemoveContainer" containerID="d076e4e26807baf418aff0c3491c1722cbc26711fd90211216908a74466029d2" Jan 26 11:08:07 crc kubenswrapper[5003]: I0126 11:08:07.152337 5003 scope.go:117] "RemoveContainer" containerID="1370bc94a109255dfb407d6634c6abce5f17de89a132815fb30a26877826d375" Jan 26 11:08:07 crc kubenswrapper[5003]: I0126 11:08:07.169863 5003 scope.go:117] "RemoveContainer" containerID="ce3d943c634bc4c237bdb22ca4c56310b8e4f127467461e36d1d81f0e980aa04" Jan 26 11:08:07 crc kubenswrapper[5003]: I0126 11:08:07.197674 5003 scope.go:117] "RemoveContainer" containerID="210a9db31f9899abc57fa0dfa9e17b430cfed6645b9fdc9f74b9063389647aee" Jan 26 11:08:07 crc kubenswrapper[5003]: I0126 11:08:07.217239 5003 scope.go:117] "RemoveContainer" containerID="9d0ea22f8d5cbef2b6ac5abbfaa6920191e268f9920611f04794c7edaa2d3ed9" Jan 26 11:08:07 crc kubenswrapper[5003]: I0126 11:08:07.233252 5003 scope.go:117] "RemoveContainer" containerID="beed7503f1f5dfa9ee2d0fbea0fd91709abfe7b9e292ac30f92241f0e12e327f" Jan 26 11:08:07 crc kubenswrapper[5003]: I0126 11:08:07.248095 5003 scope.go:117] "RemoveContainer" containerID="ae8e8ebec6a74056fa7110c0c98ddfb1321b868bef9b07236ded09565e4d7dc4" Jan 26 11:08:07 crc kubenswrapper[5003]: I0126 11:08:07.768896 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dczvs/must-gather-q8zwg" event={"ID":"af0ae9dc-aef2-45db-9f30-87494fe64171","Type":"ContainerStarted","Data":"eeb6af05329d891a6fccd6c2595619a805a17f20606a4d2f215c7e6b72a78fe4"} Jan 26 11:08:07 crc kubenswrapper[5003]: I0126 11:08:07.795666 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-dczvs/must-gather-q8zwg" podStartSLOduration=3.112343394 podStartE2EDuration="8.79565178s" podCreationTimestamp="2026-01-26 11:07:59 +0000 UTC" firstStartedPulling="2026-01-26 11:08:00.782332643 +0000 UTC m=+1496.323558214" lastFinishedPulling="2026-01-26 11:08:06.465641039 +0000 UTC 
m=+1502.006866600" observedRunningTime="2026-01-26 11:08:07.793758896 +0000 UTC m=+1503.334984457" watchObservedRunningTime="2026-01-26 11:08:07.79565178 +0000 UTC m=+1503.336877341" Jan 26 11:08:09 crc kubenswrapper[5003]: I0126 11:08:09.040127 5003 patch_prober.go:28] interesting pod/machine-config-daemon-m84kp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 11:08:09 crc kubenswrapper[5003]: I0126 11:08:09.040185 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 11:08:11 crc kubenswrapper[5003]: I0126 11:08:11.594973 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-k7hh6"] Jan 26 11:08:11 crc kubenswrapper[5003]: E0126 11:08:11.595448 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afd987da-215e-4edc-940c-1529b3531bf1" containerName="mariadb-account-delete" Jan 26 11:08:11 crc kubenswrapper[5003]: I0126 11:08:11.595460 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="afd987da-215e-4edc-940c-1529b3531bf1" containerName="mariadb-account-delete" Jan 26 11:08:11 crc kubenswrapper[5003]: I0126 11:08:11.595559 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="afd987da-215e-4edc-940c-1529b3531bf1" containerName="mariadb-account-delete" Jan 26 11:08:11 crc kubenswrapper[5003]: I0126 11:08:11.596331 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-k7hh6" Jan 26 11:08:11 crc kubenswrapper[5003]: I0126 11:08:11.612224 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-k7hh6"] Jan 26 11:08:11 crc kubenswrapper[5003]: I0126 11:08:11.771447 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rtjj2\" (UniqueName: \"kubernetes.io/projected/dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801-kube-api-access-rtjj2\") pod \"certified-operators-k7hh6\" (UID: \"dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801\") " pod="openshift-marketplace/certified-operators-k7hh6" Jan 26 11:08:11 crc kubenswrapper[5003]: I0126 11:08:11.771519 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801-utilities\") pod \"certified-operators-k7hh6\" (UID: \"dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801\") " pod="openshift-marketplace/certified-operators-k7hh6" Jan 26 11:08:11 crc kubenswrapper[5003]: I0126 11:08:11.771545 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801-catalog-content\") pod \"certified-operators-k7hh6\" (UID: \"dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801\") " pod="openshift-marketplace/certified-operators-k7hh6" Jan 26 11:08:11 crc kubenswrapper[5003]: I0126 11:08:11.872860 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801-catalog-content\") pod \"certified-operators-k7hh6\" (UID: \"dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801\") " pod="openshift-marketplace/certified-operators-k7hh6" Jan 26 11:08:11 crc kubenswrapper[5003]: I0126 11:08:11.872953 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rtjj2\" (UniqueName: \"kubernetes.io/projected/dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801-kube-api-access-rtjj2\") pod \"certified-operators-k7hh6\" (UID: \"dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801\") " pod="openshift-marketplace/certified-operators-k7hh6" Jan 26 11:08:11 crc kubenswrapper[5003]: I0126 11:08:11.873002 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801-utilities\") pod \"certified-operators-k7hh6\" (UID: \"dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801\") " pod="openshift-marketplace/certified-operators-k7hh6" Jan 26 11:08:11 crc kubenswrapper[5003]: I0126 11:08:11.873558 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801-catalog-content\") pod \"certified-operators-k7hh6\" (UID: \"dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801\") " pod="openshift-marketplace/certified-operators-k7hh6" Jan 26 11:08:11 crc kubenswrapper[5003]: I0126 11:08:11.873594 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801-utilities\") pod \"certified-operators-k7hh6\" (UID: \"dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801\") " pod="openshift-marketplace/certified-operators-k7hh6" Jan 26 11:08:11 crc kubenswrapper[5003]: I0126 11:08:11.900235 5003 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rtjj2\" (UniqueName: \"kubernetes.io/projected/dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801-kube-api-access-rtjj2\") pod \"certified-operators-k7hh6\" (UID: \"dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801\") " pod="openshift-marketplace/certified-operators-k7hh6" Jan 26 11:08:11 crc kubenswrapper[5003]: I0126 11:08:11.916966 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k7hh6" Jan 26 11:08:12 crc kubenswrapper[5003]: I0126 11:08:12.366669 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-k7hh6"] Jan 26 11:08:12 crc kubenswrapper[5003]: I0126 11:08:12.806224 5003 generic.go:334] "Generic (PLEG): container finished" podID="dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801" containerID="8d15afa3851cfb9cb9454f3646db8626b17d23389ce34b6ab61de5665176c646" exitCode=0 Jan 26 11:08:12 crc kubenswrapper[5003]: I0126 11:08:12.806294 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k7hh6" event={"ID":"dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801","Type":"ContainerDied","Data":"8d15afa3851cfb9cb9454f3646db8626b17d23389ce34b6ab61de5665176c646"} Jan 26 11:08:12 crc kubenswrapper[5003]: I0126 11:08:12.807566 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k7hh6" event={"ID":"dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801","Type":"ContainerStarted","Data":"9acb719944c8822697b2a2fb87de1ed7bb9b0e7cf219b68fe03778e2fbd58560"} Jan 26 11:08:13 crc kubenswrapper[5003]: I0126 11:08:13.815604 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k7hh6" event={"ID":"dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801","Type":"ContainerStarted","Data":"41a77202da40ddcafc144c3e02dfc171ec7072f0b87a666473377a38d0bcb12e"} Jan 26 11:08:14 crc kubenswrapper[5003]: I0126 11:08:14.826088 5003 generic.go:334] "Generic (PLEG): container finished" podID="dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801" containerID="41a77202da40ddcafc144c3e02dfc171ec7072f0b87a666473377a38d0bcb12e" exitCode=0 Jan 26 11:08:14 crc kubenswrapper[5003]: I0126 11:08:14.826352 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k7hh6" event={"ID":"dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801","Type":"ContainerDied","Data":"41a77202da40ddcafc144c3e02dfc171ec7072f0b87a666473377a38d0bcb12e"} Jan 26 11:08:15 crc kubenswrapper[5003]: I0126 11:08:15.836337 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k7hh6" event={"ID":"dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801","Type":"ContainerStarted","Data":"e6197415f8f7b8d8ee3227cee1acbc53258f70640307e6f0453c63cef41f2cc5"} Jan 26 11:08:15 crc kubenswrapper[5003]: I0126 11:08:15.862651 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-k7hh6" podStartSLOduration=2.43404188 podStartE2EDuration="4.862632966s" podCreationTimestamp="2026-01-26 11:08:11 +0000 UTC" firstStartedPulling="2026-01-26 11:08:12.807519408 +0000 UTC m=+1508.348744969" lastFinishedPulling="2026-01-26 11:08:15.236110494 +0000 UTC m=+1510.777336055" observedRunningTime="2026-01-26 11:08:15.858811476 +0000 UTC m=+1511.400037037" watchObservedRunningTime="2026-01-26 11:08:15.862632966 +0000 UTC m=+1511.403858527" Jan 26 11:08:21 crc kubenswrapper[5003]: I0126 11:08:21.917989 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/certified-operators-k7hh6" Jan 26 11:08:21 crc kubenswrapper[5003]: I0126 11:08:21.918577 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-k7hh6" Jan 26 11:08:21 crc kubenswrapper[5003]: I0126 11:08:21.975363 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-k7hh6" Jan 26 11:08:22 crc kubenswrapper[5003]: I0126 11:08:22.927979 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-k7hh6" Jan 26 11:08:22 crc kubenswrapper[5003]: I0126 11:08:22.961517 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-k7hh6"] Jan 26 11:08:24 crc kubenswrapper[5003]: I0126 11:08:24.889652 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-k7hh6" podUID="dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801" containerName="registry-server" containerID="cri-o://e6197415f8f7b8d8ee3227cee1acbc53258f70640307e6f0453c63cef41f2cc5" gracePeriod=2 Jan 26 11:08:25 crc kubenswrapper[5003]: I0126 11:08:25.803666 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k7hh6" Jan 26 11:08:25 crc kubenswrapper[5003]: I0126 11:08:25.898460 5003 generic.go:334] "Generic (PLEG): container finished" podID="dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801" containerID="e6197415f8f7b8d8ee3227cee1acbc53258f70640307e6f0453c63cef41f2cc5" exitCode=0 Jan 26 11:08:25 crc kubenswrapper[5003]: I0126 11:08:25.898502 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k7hh6" event={"ID":"dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801","Type":"ContainerDied","Data":"e6197415f8f7b8d8ee3227cee1acbc53258f70640307e6f0453c63cef41f2cc5"} Jan 26 11:08:25 crc kubenswrapper[5003]: I0126 11:08:25.898507 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-k7hh6" Jan 26 11:08:25 crc kubenswrapper[5003]: I0126 11:08:25.898538 5003 scope.go:117] "RemoveContainer" containerID="e6197415f8f7b8d8ee3227cee1acbc53258f70640307e6f0453c63cef41f2cc5" Jan 26 11:08:25 crc kubenswrapper[5003]: I0126 11:08:25.898527 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k7hh6" event={"ID":"dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801","Type":"ContainerDied","Data":"9acb719944c8822697b2a2fb87de1ed7bb9b0e7cf219b68fe03778e2fbd58560"} Jan 26 11:08:25 crc kubenswrapper[5003]: I0126 11:08:25.914671 5003 scope.go:117] "RemoveContainer" containerID="41a77202da40ddcafc144c3e02dfc171ec7072f0b87a666473377a38d0bcb12e" Jan 26 11:08:25 crc kubenswrapper[5003]: I0126 11:08:25.934102 5003 scope.go:117] "RemoveContainer" containerID="8d15afa3851cfb9cb9454f3646db8626b17d23389ce34b6ab61de5665176c646" Jan 26 11:08:25 crc kubenswrapper[5003]: I0126 11:08:25.954244 5003 scope.go:117] "RemoveContainer" containerID="e6197415f8f7b8d8ee3227cee1acbc53258f70640307e6f0453c63cef41f2cc5" Jan 26 11:08:25 crc kubenswrapper[5003]: E0126 11:08:25.954704 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6197415f8f7b8d8ee3227cee1acbc53258f70640307e6f0453c63cef41f2cc5\": container with ID starting with e6197415f8f7b8d8ee3227cee1acbc53258f70640307e6f0453c63cef41f2cc5 not found: ID does not exist" containerID="e6197415f8f7b8d8ee3227cee1acbc53258f70640307e6f0453c63cef41f2cc5" Jan 26 11:08:25 crc kubenswrapper[5003]: I0126 11:08:25.954754 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6197415f8f7b8d8ee3227cee1acbc53258f70640307e6f0453c63cef41f2cc5"} err="failed to get container status \"e6197415f8f7b8d8ee3227cee1acbc53258f70640307e6f0453c63cef41f2cc5\": rpc error: code = NotFound desc = could not find container \"e6197415f8f7b8d8ee3227cee1acbc53258f70640307e6f0453c63cef41f2cc5\": container with ID starting with e6197415f8f7b8d8ee3227cee1acbc53258f70640307e6f0453c63cef41f2cc5 not found: ID does not exist" Jan 26 11:08:25 crc kubenswrapper[5003]: I0126 11:08:25.954783 5003 scope.go:117] "RemoveContainer" containerID="41a77202da40ddcafc144c3e02dfc171ec7072f0b87a666473377a38d0bcb12e" Jan 26 11:08:25 crc kubenswrapper[5003]: E0126 11:08:25.955220 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"41a77202da40ddcafc144c3e02dfc171ec7072f0b87a666473377a38d0bcb12e\": container with ID starting with 41a77202da40ddcafc144c3e02dfc171ec7072f0b87a666473377a38d0bcb12e not found: ID does not exist" containerID="41a77202da40ddcafc144c3e02dfc171ec7072f0b87a666473377a38d0bcb12e" Jan 26 11:08:25 crc kubenswrapper[5003]: I0126 11:08:25.955256 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"41a77202da40ddcafc144c3e02dfc171ec7072f0b87a666473377a38d0bcb12e"} err="failed to get container status \"41a77202da40ddcafc144c3e02dfc171ec7072f0b87a666473377a38d0bcb12e\": rpc error: code = NotFound desc = could not find container \"41a77202da40ddcafc144c3e02dfc171ec7072f0b87a666473377a38d0bcb12e\": container with ID starting with 41a77202da40ddcafc144c3e02dfc171ec7072f0b87a666473377a38d0bcb12e not found: ID does not exist" Jan 26 11:08:25 crc kubenswrapper[5003]: I0126 11:08:25.955301 5003 scope.go:117] "RemoveContainer" 
containerID="8d15afa3851cfb9cb9454f3646db8626b17d23389ce34b6ab61de5665176c646" Jan 26 11:08:25 crc kubenswrapper[5003]: E0126 11:08:25.955586 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d15afa3851cfb9cb9454f3646db8626b17d23389ce34b6ab61de5665176c646\": container with ID starting with 8d15afa3851cfb9cb9454f3646db8626b17d23389ce34b6ab61de5665176c646 not found: ID does not exist" containerID="8d15afa3851cfb9cb9454f3646db8626b17d23389ce34b6ab61de5665176c646" Jan 26 11:08:25 crc kubenswrapper[5003]: I0126 11:08:25.955626 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d15afa3851cfb9cb9454f3646db8626b17d23389ce34b6ab61de5665176c646"} err="failed to get container status \"8d15afa3851cfb9cb9454f3646db8626b17d23389ce34b6ab61de5665176c646\": rpc error: code = NotFound desc = could not find container \"8d15afa3851cfb9cb9454f3646db8626b17d23389ce34b6ab61de5665176c646\": container with ID starting with 8d15afa3851cfb9cb9454f3646db8626b17d23389ce34b6ab61de5665176c646 not found: ID does not exist" Jan 26 11:08:25 crc kubenswrapper[5003]: I0126 11:08:25.991036 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801-utilities\") pod \"dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801\" (UID: \"dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801\") " Jan 26 11:08:25 crc kubenswrapper[5003]: I0126 11:08:25.991232 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rtjj2\" (UniqueName: \"kubernetes.io/projected/dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801-kube-api-access-rtjj2\") pod \"dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801\" (UID: \"dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801\") " Jan 26 11:08:25 crc kubenswrapper[5003]: I0126 11:08:25.991256 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801-catalog-content\") pod \"dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801\" (UID: \"dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801\") " Jan 26 11:08:25 crc kubenswrapper[5003]: I0126 11:08:25.991850 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801-utilities" (OuterVolumeSpecName: "utilities") pod "dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801" (UID: "dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:08:25 crc kubenswrapper[5003]: I0126 11:08:25.997909 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801-kube-api-access-rtjj2" (OuterVolumeSpecName: "kube-api-access-rtjj2") pod "dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801" (UID: "dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801"). InnerVolumeSpecName "kube-api-access-rtjj2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:08:26 crc kubenswrapper[5003]: I0126 11:08:26.043150 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801" (UID: "dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:08:26 crc kubenswrapper[5003]: I0126 11:08:26.092425 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rtjj2\" (UniqueName: \"kubernetes.io/projected/dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801-kube-api-access-rtjj2\") on node \"crc\" DevicePath \"\"" Jan 26 11:08:26 crc kubenswrapper[5003]: I0126 11:08:26.092592 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 11:08:26 crc kubenswrapper[5003]: I0126 11:08:26.092608 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 11:08:26 crc kubenswrapper[5003]: I0126 11:08:26.224663 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-k7hh6"] Jan 26 11:08:26 crc kubenswrapper[5003]: I0126 11:08:26.227821 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-k7hh6"] Jan 26 11:08:27 crc kubenswrapper[5003]: I0126 11:08:27.008404 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801" path="/var/lib/kubelet/pods/dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801/volumes" Jan 26 11:08:32 crc kubenswrapper[5003]: I0126 11:08:32.906764 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6hlj9"] Jan 26 11:08:32 crc kubenswrapper[5003]: E0126 11:08:32.907518 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801" containerName="extract-utilities" Jan 26 11:08:32 crc kubenswrapper[5003]: I0126 11:08:32.907534 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801" containerName="extract-utilities" Jan 26 11:08:32 crc kubenswrapper[5003]: E0126 11:08:32.907552 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801" containerName="registry-server" Jan 26 11:08:32 crc kubenswrapper[5003]: I0126 11:08:32.907560 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801" containerName="registry-server" Jan 26 11:08:32 crc kubenswrapper[5003]: E0126 11:08:32.907579 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801" containerName="extract-content" Jan 26 11:08:32 crc kubenswrapper[5003]: I0126 11:08:32.907587 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801" containerName="extract-content" Jan 26 11:08:32 crc kubenswrapper[5003]: I0126 11:08:32.907727 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd0ceb1e-65d5-4f82-8dfc-2e695fdbe801" containerName="registry-server" Jan 26 11:08:32 crc kubenswrapper[5003]: I0126 11:08:32.908730 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6hlj9" Jan 26 11:08:32 crc kubenswrapper[5003]: I0126 11:08:32.918583 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6hlj9"] Jan 26 11:08:33 crc kubenswrapper[5003]: I0126 11:08:33.082272 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc3bde44-36d1-42a3-9dc5-3d8205c1ccec-utilities\") pod \"community-operators-6hlj9\" (UID: \"dc3bde44-36d1-42a3-9dc5-3d8205c1ccec\") " pod="openshift-marketplace/community-operators-6hlj9" Jan 26 11:08:33 crc kubenswrapper[5003]: I0126 11:08:33.082389 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc3bde44-36d1-42a3-9dc5-3d8205c1ccec-catalog-content\") pod \"community-operators-6hlj9\" (UID: \"dc3bde44-36d1-42a3-9dc5-3d8205c1ccec\") " pod="openshift-marketplace/community-operators-6hlj9" Jan 26 11:08:33 crc kubenswrapper[5003]: I0126 11:08:33.082469 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dvklt\" (UniqueName: \"kubernetes.io/projected/dc3bde44-36d1-42a3-9dc5-3d8205c1ccec-kube-api-access-dvklt\") pod \"community-operators-6hlj9\" (UID: \"dc3bde44-36d1-42a3-9dc5-3d8205c1ccec\") " pod="openshift-marketplace/community-operators-6hlj9" Jan 26 11:08:33 crc kubenswrapper[5003]: I0126 11:08:33.183379 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc3bde44-36d1-42a3-9dc5-3d8205c1ccec-catalog-content\") pod \"community-operators-6hlj9\" (UID: \"dc3bde44-36d1-42a3-9dc5-3d8205c1ccec\") " pod="openshift-marketplace/community-operators-6hlj9" Jan 26 11:08:33 crc kubenswrapper[5003]: I0126 11:08:33.183492 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dvklt\" (UniqueName: \"kubernetes.io/projected/dc3bde44-36d1-42a3-9dc5-3d8205c1ccec-kube-api-access-dvklt\") pod \"community-operators-6hlj9\" (UID: \"dc3bde44-36d1-42a3-9dc5-3d8205c1ccec\") " pod="openshift-marketplace/community-operators-6hlj9" Jan 26 11:08:33 crc kubenswrapper[5003]: I0126 11:08:33.183519 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc3bde44-36d1-42a3-9dc5-3d8205c1ccec-utilities\") pod \"community-operators-6hlj9\" (UID: \"dc3bde44-36d1-42a3-9dc5-3d8205c1ccec\") " pod="openshift-marketplace/community-operators-6hlj9" Jan 26 11:08:33 crc kubenswrapper[5003]: I0126 11:08:33.184071 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc3bde44-36d1-42a3-9dc5-3d8205c1ccec-utilities\") pod \"community-operators-6hlj9\" (UID: \"dc3bde44-36d1-42a3-9dc5-3d8205c1ccec\") " pod="openshift-marketplace/community-operators-6hlj9" Jan 26 11:08:33 crc kubenswrapper[5003]: I0126 11:08:33.184072 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc3bde44-36d1-42a3-9dc5-3d8205c1ccec-catalog-content\") pod \"community-operators-6hlj9\" (UID: \"dc3bde44-36d1-42a3-9dc5-3d8205c1ccec\") " pod="openshift-marketplace/community-operators-6hlj9" Jan 26 11:08:33 crc kubenswrapper[5003]: I0126 11:08:33.203806 5003 operation_generator.go:637] 
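The volume set being attached and mounted here — "utilities" and "catalog-content" plus a generated kube-api-access-* volume — is the usual shape of an openshift-marketplace catalog pod: two emptyDirs for the extract steps and a projected service-account token. A rough sketch with client-go types, for orientation only; the projected volume is normally injected by the API server rather than authored by hand, and the token path shown is an assumption:

```go
// Sketch: approximate volume definitions behind the MountVolume lines above.
// The kube-api-access-dvklt projection is simplified; the real injected
// volume also projects the CA bundle and the namespace downward-API item.
package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func catalogVolumes() []corev1.Volume {
	emptyDir := corev1.VolumeSource{EmptyDir: &corev1.EmptyDirVolumeSource{}}
	return []corev1.Volume{
		{Name: "utilities", VolumeSource: emptyDir},       // scratch space for extract-utilities
		{Name: "catalog-content", VolumeSource: emptyDir}, // filled by extract-content
		{Name: "kube-api-access-dvklt", VolumeSource: corev1.VolumeSource{
			Projected: &corev1.ProjectedVolumeSource{
				Sources: []corev1.VolumeProjection{{
					ServiceAccountToken: &corev1.ServiceAccountTokenProjection{
						Path: "token", // assumed path
					},
				}},
			},
		}},
	}
}

func main() {
	for _, v := range catalogVolumes() {
		fmt.Println(v.Name)
	}
}
```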
"MountVolume.SetUp succeeded for volume \"kube-api-access-dvklt\" (UniqueName: \"kubernetes.io/projected/dc3bde44-36d1-42a3-9dc5-3d8205c1ccec-kube-api-access-dvklt\") pod \"community-operators-6hlj9\" (UID: \"dc3bde44-36d1-42a3-9dc5-3d8205c1ccec\") " pod="openshift-marketplace/community-operators-6hlj9" Jan 26 11:08:33 crc kubenswrapper[5003]: I0126 11:08:33.266022 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6hlj9" Jan 26 11:08:33 crc kubenswrapper[5003]: I0126 11:08:33.578967 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6hlj9"] Jan 26 11:08:33 crc kubenswrapper[5003]: I0126 11:08:33.969513 5003 generic.go:334] "Generic (PLEG): container finished" podID="dc3bde44-36d1-42a3-9dc5-3d8205c1ccec" containerID="48c9b9e04baac66554625910bb551052319254fffaeb3d53367801ccaec7bb9d" exitCode=0 Jan 26 11:08:33 crc kubenswrapper[5003]: I0126 11:08:33.969562 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6hlj9" event={"ID":"dc3bde44-36d1-42a3-9dc5-3d8205c1ccec","Type":"ContainerDied","Data":"48c9b9e04baac66554625910bb551052319254fffaeb3d53367801ccaec7bb9d"} Jan 26 11:08:33 crc kubenswrapper[5003]: I0126 11:08:33.969788 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6hlj9" event={"ID":"dc3bde44-36d1-42a3-9dc5-3d8205c1ccec","Type":"ContainerStarted","Data":"14f258b172973d1f23b7532249ec88f06523b245bdfd1361e1eecd7d33218708"} Jan 26 11:08:39 crc kubenswrapper[5003]: I0126 11:08:39.020243 5003 generic.go:334] "Generic (PLEG): container finished" podID="dc3bde44-36d1-42a3-9dc5-3d8205c1ccec" containerID="1ea1500370a6fe2f25cee131f6394097b7a2aef815710a40c97f5f9b3d43ab75" exitCode=0 Jan 26 11:08:39 crc kubenswrapper[5003]: I0126 11:08:39.020334 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6hlj9" event={"ID":"dc3bde44-36d1-42a3-9dc5-3d8205c1ccec","Type":"ContainerDied","Data":"1ea1500370a6fe2f25cee131f6394097b7a2aef815710a40c97f5f9b3d43ab75"} Jan 26 11:08:39 crc kubenswrapper[5003]: I0126 11:08:39.040493 5003 patch_prober.go:28] interesting pod/machine-config-daemon-m84kp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 11:08:39 crc kubenswrapper[5003]: I0126 11:08:39.040563 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 11:08:39 crc kubenswrapper[5003]: I0126 11:08:39.040619 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" Jan 26 11:08:39 crc kubenswrapper[5003]: I0126 11:08:39.041346 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fd493fb6bf7d26d1384d6055009fa9b6eff08b01fe28831769b5174d9d440aa2"} pod="openshift-machine-config-operator/machine-config-daemon-m84kp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 26 11:08:39 
Jan 26 11:08:39 crc kubenswrapper[5003]: I0126 11:08:39.041402 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" containerID="cri-o://fd493fb6bf7d26d1384d6055009fa9b6eff08b01fe28831769b5174d9d440aa2" gracePeriod=600
Jan 26 11:08:39 crc kubenswrapper[5003]: E0126 11:08:39.185442 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m84kp_openshift-machine-config-operator(c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd"
Jan 26 11:08:40 crc kubenswrapper[5003]: I0126 11:08:40.029641 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6hlj9" event={"ID":"dc3bde44-36d1-42a3-9dc5-3d8205c1ccec","Type":"ContainerStarted","Data":"807e7248f69cf17e74199e4185ae07f11dd1233d43654d16992529e433900688"}
Jan 26 11:08:40 crc kubenswrapper[5003]: I0126 11:08:40.032416 5003 generic.go:334] "Generic (PLEG): container finished" podID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerID="fd493fb6bf7d26d1384d6055009fa9b6eff08b01fe28831769b5174d9d440aa2" exitCode=0
Jan 26 11:08:40 crc kubenswrapper[5003]: I0126 11:08:40.032470 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" event={"ID":"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd","Type":"ContainerDied","Data":"fd493fb6bf7d26d1384d6055009fa9b6eff08b01fe28831769b5174d9d440aa2"}
Jan 26 11:08:40 crc kubenswrapper[5003]: I0126 11:08:40.032542 5003 scope.go:117] "RemoveContainer" containerID="d611de2469cbe98c2fe1bc7ea60af3e72e8a66e47fcfb0fbfee926d96efd43c1"
Jan 26 11:08:40 crc kubenswrapper[5003]: I0126 11:08:40.032908 5003 scope.go:117] "RemoveContainer" containerID="fd493fb6bf7d26d1384d6055009fa9b6eff08b01fe28831769b5174d9d440aa2"
Jan 26 11:08:40 crc kubenswrapper[5003]: E0126 11:08:40.033120 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m84kp_openshift-machine-config-operator(c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd"
Jan 26 11:08:40 crc kubenswrapper[5003]: I0126 11:08:40.055307 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6hlj9" podStartSLOduration=2.580152135 podStartE2EDuration="8.055267853s" podCreationTimestamp="2026-01-26 11:08:32 +0000 UTC" firstStartedPulling="2026-01-26 11:08:33.971102244 +0000 UTC m=+1529.512327805" lastFinishedPulling="2026-01-26 11:08:39.446217962 +0000 UTC m=+1534.987443523" observedRunningTime="2026-01-26 11:08:40.050315461 +0000 UTC m=+1535.591541042" watchObservedRunningTime="2026-01-26 11:08:40.055267853 +0000 UTC m=+1535.596493424"
Jan 26 11:08:43 crc kubenswrapper[5003]: I0126 11:08:43.266960 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6hlj9"
Jan 26 11:08:43 crc kubenswrapper[5003]: I0126 11:08:43.267570 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6hlj9"
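The startup-latency line above is internally consistent and worth decoding: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp (8.055267853s), and podStartSLOduration is that figure minus the image-pull window (8.055267853 − 5.475115718 = 2.580152135). A small sketch that re-derives the numbers from the logged timestamps; the trailing "m=+…" monotonic-clock readings must be dropped before parsing:

```go
// Sketch: re-deriving the durations in the pod_startup_latency_tracker line.
package main

import (
	"fmt"
	"time"
)

const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

func mustParse(s string) time.Time {
	t, err := time.Parse(layout, s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	created := mustParse("2026-01-26 11:08:32 +0000 UTC")
	running := mustParse("2026-01-26 11:08:40.055267853 +0000 UTC")
	pullStart := mustParse("2026-01-26 11:08:33.971102244 +0000 UTC")
	pullEnd := mustParse("2026-01-26 11:08:39.446217962 +0000 UTC")

	e2e := running.Sub(created)
	pull := pullEnd.Sub(pullStart)
	fmt.Println("podStartE2EDuration:", e2e)      // 8.055267853s, as logged
	fmt.Println("image pull:", pull)              // 5.475115718s
	fmt.Println("podStartSLOduration:", e2e-pull) // 2.580152135s, as logged
}
```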
(probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6hlj9" Jan 26 11:08:43 crc kubenswrapper[5003]: I0126 11:08:43.314235 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6hlj9" Jan 26 11:08:44 crc kubenswrapper[5003]: I0126 11:08:44.103267 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6hlj9" Jan 26 11:08:44 crc kubenswrapper[5003]: I0126 11:08:44.169650 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6hlj9"] Jan 26 11:08:44 crc kubenswrapper[5003]: I0126 11:08:44.220738 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zccvn"] Jan 26 11:08:44 crc kubenswrapper[5003]: I0126 11:08:44.220971 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-zccvn" podUID="dfe4c3b2-3353-4091-bcdc-a63f51c76cfd" containerName="registry-server" containerID="cri-o://765b83a9fdec456e3f2322711efb642bacfe11dedc9a34332cff3a401afdb5cc" gracePeriod=2 Jan 26 11:08:45 crc kubenswrapper[5003]: I0126 11:08:45.071077 5003 generic.go:334] "Generic (PLEG): container finished" podID="dfe4c3b2-3353-4091-bcdc-a63f51c76cfd" containerID="765b83a9fdec456e3f2322711efb642bacfe11dedc9a34332cff3a401afdb5cc" exitCode=0 Jan 26 11:08:45 crc kubenswrapper[5003]: I0126 11:08:45.071162 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zccvn" event={"ID":"dfe4c3b2-3353-4091-bcdc-a63f51c76cfd","Type":"ContainerDied","Data":"765b83a9fdec456e3f2322711efb642bacfe11dedc9a34332cff3a401afdb5cc"} Jan 26 11:08:45 crc kubenswrapper[5003]: I0126 11:08:45.071450 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zccvn" event={"ID":"dfe4c3b2-3353-4091-bcdc-a63f51c76cfd","Type":"ContainerDied","Data":"fcbb1ac80c073d76e9b405cbd5f78152c7b74914d4a2e6f809a9548775670e45"} Jan 26 11:08:45 crc kubenswrapper[5003]: I0126 11:08:45.071463 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fcbb1ac80c073d76e9b405cbd5f78152c7b74914d4a2e6f809a9548775670e45" Jan 26 11:08:45 crc kubenswrapper[5003]: I0126 11:08:45.078996 5003 util.go:48] "No ready sandbox for pod can be found. 
Jan 26 11:08:45 crc kubenswrapper[5003]: I0126 11:08:45.178081 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-smxxv\" (UniqueName: \"kubernetes.io/projected/dfe4c3b2-3353-4091-bcdc-a63f51c76cfd-kube-api-access-smxxv\") pod \"dfe4c3b2-3353-4091-bcdc-a63f51c76cfd\" (UID: \"dfe4c3b2-3353-4091-bcdc-a63f51c76cfd\") "
Jan 26 11:08:45 crc kubenswrapper[5003]: I0126 11:08:45.178199 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfe4c3b2-3353-4091-bcdc-a63f51c76cfd-catalog-content\") pod \"dfe4c3b2-3353-4091-bcdc-a63f51c76cfd\" (UID: \"dfe4c3b2-3353-4091-bcdc-a63f51c76cfd\") "
Jan 26 11:08:45 crc kubenswrapper[5003]: I0126 11:08:45.178260 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfe4c3b2-3353-4091-bcdc-a63f51c76cfd-utilities\") pod \"dfe4c3b2-3353-4091-bcdc-a63f51c76cfd\" (UID: \"dfe4c3b2-3353-4091-bcdc-a63f51c76cfd\") "
Jan 26 11:08:45 crc kubenswrapper[5003]: I0126 11:08:45.180276 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dfe4c3b2-3353-4091-bcdc-a63f51c76cfd-utilities" (OuterVolumeSpecName: "utilities") pod "dfe4c3b2-3353-4091-bcdc-a63f51c76cfd" (UID: "dfe4c3b2-3353-4091-bcdc-a63f51c76cfd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 11:08:45 crc kubenswrapper[5003]: I0126 11:08:45.193452 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfe4c3b2-3353-4091-bcdc-a63f51c76cfd-kube-api-access-smxxv" (OuterVolumeSpecName: "kube-api-access-smxxv") pod "dfe4c3b2-3353-4091-bcdc-a63f51c76cfd" (UID: "dfe4c3b2-3353-4091-bcdc-a63f51c76cfd"). InnerVolumeSpecName "kube-api-access-smxxv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 11:08:45 crc kubenswrapper[5003]: I0126 11:08:45.236201 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dfe4c3b2-3353-4091-bcdc-a63f51c76cfd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dfe4c3b2-3353-4091-bcdc-a63f51c76cfd" (UID: "dfe4c3b2-3353-4091-bcdc-a63f51c76cfd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 11:08:45 crc kubenswrapper[5003]: I0126 11:08:45.279451 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfe4c3b2-3353-4091-bcdc-a63f51c76cfd-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 26 11:08:45 crc kubenswrapper[5003]: I0126 11:08:45.279485 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfe4c3b2-3353-4091-bcdc-a63f51c76cfd-utilities\") on node \"crc\" DevicePath \"\""
Jan 26 11:08:45 crc kubenswrapper[5003]: I0126 11:08:45.279496 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-smxxv\" (UniqueName: \"kubernetes.io/projected/dfe4c3b2-3353-4091-bcdc-a63f51c76cfd-kube-api-access-smxxv\") on node \"crc\" DevicePath \"\""
Jan 26 11:08:46 crc kubenswrapper[5003]: I0126 11:08:46.077137 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zccvn"
Jan 26 11:08:46 crc kubenswrapper[5003]: I0126 11:08:46.107591 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zccvn"]
Jan 26 11:08:46 crc kubenswrapper[5003]: I0126 11:08:46.111034 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-zccvn"]
Jan 26 11:08:46 crc kubenswrapper[5003]: I0126 11:08:46.552877 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-dg7vq"]
Jan 26 11:08:46 crc kubenswrapper[5003]: E0126 11:08:46.553088 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfe4c3b2-3353-4091-bcdc-a63f51c76cfd" containerName="extract-content"
Jan 26 11:08:46 crc kubenswrapper[5003]: I0126 11:08:46.553102 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfe4c3b2-3353-4091-bcdc-a63f51c76cfd" containerName="extract-content"
Jan 26 11:08:46 crc kubenswrapper[5003]: E0126 11:08:46.553110 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfe4c3b2-3353-4091-bcdc-a63f51c76cfd" containerName="registry-server"
Jan 26 11:08:46 crc kubenswrapper[5003]: I0126 11:08:46.553116 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfe4c3b2-3353-4091-bcdc-a63f51c76cfd" containerName="registry-server"
Jan 26 11:08:46 crc kubenswrapper[5003]: E0126 11:08:46.553128 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfe4c3b2-3353-4091-bcdc-a63f51c76cfd" containerName="extract-utilities"
Jan 26 11:08:46 crc kubenswrapper[5003]: I0126 11:08:46.553135 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfe4c3b2-3353-4091-bcdc-a63f51c76cfd" containerName="extract-utilities"
Jan 26 11:08:46 crc kubenswrapper[5003]: I0126 11:08:46.553244 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfe4c3b2-3353-4091-bcdc-a63f51c76cfd" containerName="registry-server"
Jan 26 11:08:46 crc kubenswrapper[5003]: I0126 11:08:46.553999 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dg7vq"
Jan 26 11:08:46 crc kubenswrapper[5003]: I0126 11:08:46.572094 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dg7vq"]
Jan 26 11:08:46 crc kubenswrapper[5003]: I0126 11:08:46.596819 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-898tf\" (UniqueName: \"kubernetes.io/projected/450eb0d6-3b97-42aa-ae3f-99163c738868-kube-api-access-898tf\") pod \"redhat-operators-dg7vq\" (UID: \"450eb0d6-3b97-42aa-ae3f-99163c738868\") " pod="openshift-marketplace/redhat-operators-dg7vq"
Jan 26 11:08:46 crc kubenswrapper[5003]: I0126 11:08:46.596928 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/450eb0d6-3b97-42aa-ae3f-99163c738868-utilities\") pod \"redhat-operators-dg7vq\" (UID: \"450eb0d6-3b97-42aa-ae3f-99163c738868\") " pod="openshift-marketplace/redhat-operators-dg7vq"
Jan 26 11:08:46 crc kubenswrapper[5003]: I0126 11:08:46.596999 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/450eb0d6-3b97-42aa-ae3f-99163c738868-catalog-content\") pod \"redhat-operators-dg7vq\" (UID: \"450eb0d6-3b97-42aa-ae3f-99163c738868\") " pod="openshift-marketplace/redhat-operators-dg7vq"
Jan 26 11:08:46 crc kubenswrapper[5003]: I0126 11:08:46.698713 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-898tf\" (UniqueName: \"kubernetes.io/projected/450eb0d6-3b97-42aa-ae3f-99163c738868-kube-api-access-898tf\") pod \"redhat-operators-dg7vq\" (UID: \"450eb0d6-3b97-42aa-ae3f-99163c738868\") " pod="openshift-marketplace/redhat-operators-dg7vq"
Jan 26 11:08:46 crc kubenswrapper[5003]: I0126 11:08:46.698803 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/450eb0d6-3b97-42aa-ae3f-99163c738868-utilities\") pod \"redhat-operators-dg7vq\" (UID: \"450eb0d6-3b97-42aa-ae3f-99163c738868\") " pod="openshift-marketplace/redhat-operators-dg7vq"
Jan 26 11:08:46 crc kubenswrapper[5003]: I0126 11:08:46.698872 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/450eb0d6-3b97-42aa-ae3f-99163c738868-catalog-content\") pod \"redhat-operators-dg7vq\" (UID: \"450eb0d6-3b97-42aa-ae3f-99163c738868\") " pod="openshift-marketplace/redhat-operators-dg7vq"
Jan 26 11:08:46 crc kubenswrapper[5003]: I0126 11:08:46.699377 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/450eb0d6-3b97-42aa-ae3f-99163c738868-catalog-content\") pod \"redhat-operators-dg7vq\" (UID: \"450eb0d6-3b97-42aa-ae3f-99163c738868\") " pod="openshift-marketplace/redhat-operators-dg7vq"
Jan 26 11:08:46 crc kubenswrapper[5003]: I0126 11:08:46.699540 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/450eb0d6-3b97-42aa-ae3f-99163c738868-utilities\") pod \"redhat-operators-dg7vq\" (UID: \"450eb0d6-3b97-42aa-ae3f-99163c738868\") " pod="openshift-marketplace/redhat-operators-dg7vq"
Jan 26 11:08:46 crc kubenswrapper[5003]: I0126 11:08:46.725016 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-898tf\" (UniqueName: \"kubernetes.io/projected/450eb0d6-3b97-42aa-ae3f-99163c738868-kube-api-access-898tf\") pod \"redhat-operators-dg7vq\" (UID: \"450eb0d6-3b97-42aa-ae3f-99163c738868\") " pod="openshift-marketplace/redhat-operators-dg7vq"
Jan 26 11:08:46 crc kubenswrapper[5003]: I0126 11:08:46.868725 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dg7vq"
Jan 26 11:08:47 crc kubenswrapper[5003]: I0126 11:08:47.014492 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dfe4c3b2-3353-4091-bcdc-a63f51c76cfd" path="/var/lib/kubelet/pods/dfe4c3b2-3353-4091-bcdc-a63f51c76cfd/volumes"
Jan 26 11:08:47 crc kubenswrapper[5003]: I0126 11:08:47.145349 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dg7vq"]
Jan 26 11:08:48 crc kubenswrapper[5003]: I0126 11:08:48.132265 5003 generic.go:334] "Generic (PLEG): container finished" podID="450eb0d6-3b97-42aa-ae3f-99163c738868" containerID="9d4a733a1ed3ed1833d51c8eddbdafda6626df17826f3e25ab1867358f18365d" exitCode=0
Jan 26 11:08:48 crc kubenswrapper[5003]: I0126 11:08:48.132322 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dg7vq" event={"ID":"450eb0d6-3b97-42aa-ae3f-99163c738868","Type":"ContainerDied","Data":"9d4a733a1ed3ed1833d51c8eddbdafda6626df17826f3e25ab1867358f18365d"}
Jan 26 11:08:48 crc kubenswrapper[5003]: I0126 11:08:48.132374 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dg7vq" event={"ID":"450eb0d6-3b97-42aa-ae3f-99163c738868","Type":"ContainerStarted","Data":"423b115d682fe5e55c16865daf2c48010227c4dfe9d4c949e090188105efdda5"}
Jan 26 11:08:52 crc kubenswrapper[5003]: I0126 11:08:52.156790 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dg7vq" event={"ID":"450eb0d6-3b97-42aa-ae3f-99163c738868","Type":"ContainerStarted","Data":"4d3e841f9ebed95bdc1dfb854a672183802037edbaa860afd92f19d8afde1d9e"}
Jan 26 11:08:53 crc kubenswrapper[5003]: I0126 11:08:53.001815 5003 scope.go:117] "RemoveContainer" containerID="fd493fb6bf7d26d1384d6055009fa9b6eff08b01fe28831769b5174d9d440aa2"
Jan 26 11:08:53 crc kubenswrapper[5003]: E0126 11:08:53.002092 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m84kp_openshift-machine-config-operator(c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd"
Jan 26 11:08:53 crc kubenswrapper[5003]: I0126 11:08:53.168222 5003 generic.go:334] "Generic (PLEG): container finished" podID="450eb0d6-3b97-42aa-ae3f-99163c738868" containerID="4d3e841f9ebed95bdc1dfb854a672183802037edbaa860afd92f19d8afde1d9e" exitCode=0
Jan 26 11:08:53 crc kubenswrapper[5003]: I0126 11:08:53.168307 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dg7vq" event={"ID":"450eb0d6-3b97-42aa-ae3f-99163c738868","Type":"ContainerDied","Data":"4d3e841f9ebed95bdc1dfb854a672183802037edbaa860afd92f19d8afde1d9e"}
Jan 26 11:08:55 crc kubenswrapper[5003]: I0126 11:08:55.181660 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dg7vq" event={"ID":"450eb0d6-3b97-42aa-ae3f-99163c738868","Type":"ContainerStarted","Data":"2b270818a8a268841731b9a53500b2578767c4ea7736506bc95661e870dac6c0"}
Jan 26 11:08:56 crc kubenswrapper[5003]: I0126 11:08:56.207179 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-dg7vq" podStartSLOduration=3.328442119 podStartE2EDuration="10.207165787s" podCreationTimestamp="2026-01-26 11:08:46 +0000 UTC" firstStartedPulling="2026-01-26 11:08:48.133917753 +0000 UTC m=+1543.675143314" lastFinishedPulling="2026-01-26 11:08:55.012641411 +0000 UTC m=+1550.553866982" observedRunningTime="2026-01-26 11:08:56.204694346 +0000 UTC m=+1551.745919907" watchObservedRunningTime="2026-01-26 11:08:56.207165787 +0000 UTC m=+1551.748391348"
Jan 26 11:08:56 crc kubenswrapper[5003]: I0126 11:08:56.869839 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-dg7vq"
Jan 26 11:08:56 crc kubenswrapper[5003]: I0126 11:08:56.869969 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-dg7vq"
Jan 26 11:08:57 crc kubenswrapper[5003]: I0126 11:08:57.941616 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-dg7vq" podUID="450eb0d6-3b97-42aa-ae3f-99163c738868" containerName="registry-server" probeResult="failure" output=<
Jan 26 11:08:57 crc kubenswrapper[5003]: timeout: failed to connect service ":50051" within 1s
Jan 26 11:08:57 crc kubenswrapper[5003]: >
Jan 26 11:09:02 crc kubenswrapper[5003]: I0126 11:09:02.857905 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-5kwm8_c152b47d-1462-4bec-9048-37ce680c0d19/control-plane-machine-set-operator/0.log"
Jan 26 11:09:03 crc kubenswrapper[5003]: I0126 11:09:03.049009 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-7j782_f149d971-e11c-471d-91a2-a8e5ed472e41/machine-api-operator/0.log"
Jan 26 11:09:03 crc kubenswrapper[5003]: I0126 11:09:03.061350 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-7j782_f149d971-e11c-471d-91a2-a8e5ed472e41/kube-rbac-proxy/0.log"
Jan 26 11:09:06 crc kubenswrapper[5003]: I0126 11:09:06.925864 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-dg7vq"
Jan 26 11:09:06 crc kubenswrapper[5003]: I0126 11:09:06.970941 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-dg7vq"
Jan 26 11:09:07 crc kubenswrapper[5003]: I0126 11:09:07.155821 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dg7vq"]
Jan 26 11:09:07 crc kubenswrapper[5003]: I0126 11:09:07.710632 5003 scope.go:117] "RemoveContainer" containerID="f259088594911367b024408b1590172a888f97a734e2e2d25a371ac7a2317752"
Jan 26 11:09:07 crc kubenswrapper[5003]: I0126 11:09:07.734634 5003 scope.go:117] "RemoveContainer" containerID="765b83a9fdec456e3f2322711efb642bacfe11dedc9a34332cff3a401afdb5cc"
Jan 26 11:09:07 crc kubenswrapper[5003]: I0126 11:09:07.756588 5003 scope.go:117] "RemoveContainer" containerID="ecf1d84fdf6bce4b74db76fc407ac404661f011632c139443d47b5774a5576d5"
Jan 26 11:09:07 crc kubenswrapper[5003]: I0126 11:09:07.784143 5003 scope.go:117] "RemoveContainer" containerID="d41ec46a0eb8dca2454ada3d69b62d04d5eb482eb423a3951e9a399a5a046858"
containerID="d41ec46a0eb8dca2454ada3d69b62d04d5eb482eb423a3951e9a399a5a046858" Jan 26 11:09:08 crc kubenswrapper[5003]: I0126 11:09:08.002063 5003 scope.go:117] "RemoveContainer" containerID="fd493fb6bf7d26d1384d6055009fa9b6eff08b01fe28831769b5174d9d440aa2" Jan 26 11:09:08 crc kubenswrapper[5003]: E0126 11:09:08.003458 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m84kp_openshift-machine-config-operator(c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" Jan 26 11:09:08 crc kubenswrapper[5003]: I0126 11:09:08.264001 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-dg7vq" podUID="450eb0d6-3b97-42aa-ae3f-99163c738868" containerName="registry-server" containerID="cri-o://2b270818a8a268841731b9a53500b2578767c4ea7736506bc95661e870dac6c0" gracePeriod=2 Jan 26 11:09:08 crc kubenswrapper[5003]: I0126 11:09:08.618268 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dg7vq" Jan 26 11:09:08 crc kubenswrapper[5003]: I0126 11:09:08.810070 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/450eb0d6-3b97-42aa-ae3f-99163c738868-utilities\") pod \"450eb0d6-3b97-42aa-ae3f-99163c738868\" (UID: \"450eb0d6-3b97-42aa-ae3f-99163c738868\") " Jan 26 11:09:08 crc kubenswrapper[5003]: I0126 11:09:08.810248 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/450eb0d6-3b97-42aa-ae3f-99163c738868-catalog-content\") pod \"450eb0d6-3b97-42aa-ae3f-99163c738868\" (UID: \"450eb0d6-3b97-42aa-ae3f-99163c738868\") " Jan 26 11:09:08 crc kubenswrapper[5003]: I0126 11:09:08.810335 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-898tf\" (UniqueName: \"kubernetes.io/projected/450eb0d6-3b97-42aa-ae3f-99163c738868-kube-api-access-898tf\") pod \"450eb0d6-3b97-42aa-ae3f-99163c738868\" (UID: \"450eb0d6-3b97-42aa-ae3f-99163c738868\") " Jan 26 11:09:08 crc kubenswrapper[5003]: I0126 11:09:08.811086 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/450eb0d6-3b97-42aa-ae3f-99163c738868-utilities" (OuterVolumeSpecName: "utilities") pod "450eb0d6-3b97-42aa-ae3f-99163c738868" (UID: "450eb0d6-3b97-42aa-ae3f-99163c738868"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:09:08 crc kubenswrapper[5003]: I0126 11:09:08.816885 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/450eb0d6-3b97-42aa-ae3f-99163c738868-kube-api-access-898tf" (OuterVolumeSpecName: "kube-api-access-898tf") pod "450eb0d6-3b97-42aa-ae3f-99163c738868" (UID: "450eb0d6-3b97-42aa-ae3f-99163c738868"). InnerVolumeSpecName "kube-api-access-898tf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:09:08 crc kubenswrapper[5003]: I0126 11:09:08.912196 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-898tf\" (UniqueName: \"kubernetes.io/projected/450eb0d6-3b97-42aa-ae3f-99163c738868-kube-api-access-898tf\") on node \"crc\" DevicePath \"\"" Jan 26 11:09:08 crc kubenswrapper[5003]: I0126 11:09:08.912242 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/450eb0d6-3b97-42aa-ae3f-99163c738868-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 11:09:08 crc kubenswrapper[5003]: I0126 11:09:08.965881 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/450eb0d6-3b97-42aa-ae3f-99163c738868-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "450eb0d6-3b97-42aa-ae3f-99163c738868" (UID: "450eb0d6-3b97-42aa-ae3f-99163c738868"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:09:09 crc kubenswrapper[5003]: I0126 11:09:09.013460 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/450eb0d6-3b97-42aa-ae3f-99163c738868-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 11:09:09 crc kubenswrapper[5003]: I0126 11:09:09.271250 5003 generic.go:334] "Generic (PLEG): container finished" podID="450eb0d6-3b97-42aa-ae3f-99163c738868" containerID="2b270818a8a268841731b9a53500b2578767c4ea7736506bc95661e870dac6c0" exitCode=0 Jan 26 11:09:09 crc kubenswrapper[5003]: I0126 11:09:09.271331 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dg7vq" Jan 26 11:09:09 crc kubenswrapper[5003]: I0126 11:09:09.271335 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dg7vq" event={"ID":"450eb0d6-3b97-42aa-ae3f-99163c738868","Type":"ContainerDied","Data":"2b270818a8a268841731b9a53500b2578767c4ea7736506bc95661e870dac6c0"} Jan 26 11:09:09 crc kubenswrapper[5003]: I0126 11:09:09.271744 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dg7vq" event={"ID":"450eb0d6-3b97-42aa-ae3f-99163c738868","Type":"ContainerDied","Data":"423b115d682fe5e55c16865daf2c48010227c4dfe9d4c949e090188105efdda5"} Jan 26 11:09:09 crc kubenswrapper[5003]: I0126 11:09:09.271779 5003 scope.go:117] "RemoveContainer" containerID="2b270818a8a268841731b9a53500b2578767c4ea7736506bc95661e870dac6c0" Jan 26 11:09:09 crc kubenswrapper[5003]: I0126 11:09:09.292577 5003 scope.go:117] "RemoveContainer" containerID="4d3e841f9ebed95bdc1dfb854a672183802037edbaa860afd92f19d8afde1d9e" Jan 26 11:09:09 crc kubenswrapper[5003]: I0126 11:09:09.293912 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dg7vq"] Jan 26 11:09:09 crc kubenswrapper[5003]: I0126 11:09:09.302051 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-dg7vq"] Jan 26 11:09:09 crc kubenswrapper[5003]: I0126 11:09:09.309262 5003 scope.go:117] "RemoveContainer" containerID="9d4a733a1ed3ed1833d51c8eddbdafda6626df17826f3e25ab1867358f18365d" Jan 26 11:09:09 crc kubenswrapper[5003]: I0126 11:09:09.336057 5003 scope.go:117] "RemoveContainer" containerID="2b270818a8a268841731b9a53500b2578767c4ea7736506bc95661e870dac6c0" Jan 26 11:09:09 crc kubenswrapper[5003]: E0126 11:09:09.336542 5003 log.go:32] "ContainerStatus 
from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b270818a8a268841731b9a53500b2578767c4ea7736506bc95661e870dac6c0\": container with ID starting with 2b270818a8a268841731b9a53500b2578767c4ea7736506bc95661e870dac6c0 not found: ID does not exist" containerID="2b270818a8a268841731b9a53500b2578767c4ea7736506bc95661e870dac6c0" Jan 26 11:09:09 crc kubenswrapper[5003]: I0126 11:09:09.336597 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b270818a8a268841731b9a53500b2578767c4ea7736506bc95661e870dac6c0"} err="failed to get container status \"2b270818a8a268841731b9a53500b2578767c4ea7736506bc95661e870dac6c0\": rpc error: code = NotFound desc = could not find container \"2b270818a8a268841731b9a53500b2578767c4ea7736506bc95661e870dac6c0\": container with ID starting with 2b270818a8a268841731b9a53500b2578767c4ea7736506bc95661e870dac6c0 not found: ID does not exist" Jan 26 11:09:09 crc kubenswrapper[5003]: I0126 11:09:09.336624 5003 scope.go:117] "RemoveContainer" containerID="4d3e841f9ebed95bdc1dfb854a672183802037edbaa860afd92f19d8afde1d9e" Jan 26 11:09:09 crc kubenswrapper[5003]: E0126 11:09:09.337052 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d3e841f9ebed95bdc1dfb854a672183802037edbaa860afd92f19d8afde1d9e\": container with ID starting with 4d3e841f9ebed95bdc1dfb854a672183802037edbaa860afd92f19d8afde1d9e not found: ID does not exist" containerID="4d3e841f9ebed95bdc1dfb854a672183802037edbaa860afd92f19d8afde1d9e" Jan 26 11:09:09 crc kubenswrapper[5003]: I0126 11:09:09.337101 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d3e841f9ebed95bdc1dfb854a672183802037edbaa860afd92f19d8afde1d9e"} err="failed to get container status \"4d3e841f9ebed95bdc1dfb854a672183802037edbaa860afd92f19d8afde1d9e\": rpc error: code = NotFound desc = could not find container \"4d3e841f9ebed95bdc1dfb854a672183802037edbaa860afd92f19d8afde1d9e\": container with ID starting with 4d3e841f9ebed95bdc1dfb854a672183802037edbaa860afd92f19d8afde1d9e not found: ID does not exist" Jan 26 11:09:09 crc kubenswrapper[5003]: I0126 11:09:09.337119 5003 scope.go:117] "RemoveContainer" containerID="9d4a733a1ed3ed1833d51c8eddbdafda6626df17826f3e25ab1867358f18365d" Jan 26 11:09:09 crc kubenswrapper[5003]: E0126 11:09:09.342922 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d4a733a1ed3ed1833d51c8eddbdafda6626df17826f3e25ab1867358f18365d\": container with ID starting with 9d4a733a1ed3ed1833d51c8eddbdafda6626df17826f3e25ab1867358f18365d not found: ID does not exist" containerID="9d4a733a1ed3ed1833d51c8eddbdafda6626df17826f3e25ab1867358f18365d" Jan 26 11:09:09 crc kubenswrapper[5003]: I0126 11:09:09.342973 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d4a733a1ed3ed1833d51c8eddbdafda6626df17826f3e25ab1867358f18365d"} err="failed to get container status \"9d4a733a1ed3ed1833d51c8eddbdafda6626df17826f3e25ab1867358f18365d\": rpc error: code = NotFound desc = could not find container \"9d4a733a1ed3ed1833d51c8eddbdafda6626df17826f3e25ab1867358f18365d\": container with ID starting with 9d4a733a1ed3ed1833d51c8eddbdafda6626df17826f3e25ab1867358f18365d not found: ID does not exist" Jan 26 11:09:11 crc kubenswrapper[5003]: I0126 11:09:11.008458 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="450eb0d6-3b97-42aa-ae3f-99163c738868" path="/var/lib/kubelet/pods/450eb0d6-3b97-42aa-ae3f-99163c738868/volumes" Jan 26 11:09:20 crc kubenswrapper[5003]: I0126 11:09:20.001382 5003 scope.go:117] "RemoveContainer" containerID="fd493fb6bf7d26d1384d6055009fa9b6eff08b01fe28831769b5174d9d440aa2" Jan 26 11:09:20 crc kubenswrapper[5003]: E0126 11:09:20.002023 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m84kp_openshift-machine-config-operator(c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" Jan 26 11:09:32 crc kubenswrapper[5003]: I0126 11:09:32.142984 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-m7jxn_0f4f27a6-9cd9-4b96-90d2-dd695d64362c/kube-rbac-proxy/0.log" Jan 26 11:09:32 crc kubenswrapper[5003]: I0126 11:09:32.210834 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-m7jxn_0f4f27a6-9cd9-4b96-90d2-dd695d64362c/controller/0.log" Jan 26 11:09:32 crc kubenswrapper[5003]: I0126 11:09:32.333879 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/cp-frr-files/0.log" Jan 26 11:09:32 crc kubenswrapper[5003]: I0126 11:09:32.562346 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/cp-frr-files/0.log" Jan 26 11:09:32 crc kubenswrapper[5003]: I0126 11:09:32.569171 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/cp-reloader/0.log" Jan 26 11:09:32 crc kubenswrapper[5003]: I0126 11:09:32.573830 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/cp-metrics/0.log" Jan 26 11:09:32 crc kubenswrapper[5003]: I0126 11:09:32.575986 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/cp-reloader/0.log" Jan 26 11:09:32 crc kubenswrapper[5003]: I0126 11:09:32.771388 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/cp-metrics/0.log" Jan 26 11:09:32 crc kubenswrapper[5003]: I0126 11:09:32.771846 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/cp-reloader/0.log" Jan 26 11:09:32 crc kubenswrapper[5003]: I0126 11:09:32.778838 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/cp-metrics/0.log" Jan 26 11:09:32 crc kubenswrapper[5003]: I0126 11:09:32.793632 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/cp-frr-files/0.log" Jan 26 11:09:33 crc kubenswrapper[5003]: I0126 11:09:33.017213 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/cp-frr-files/0.log" Jan 26 11:09:33 crc kubenswrapper[5003]: I0126 11:09:33.035245 5003 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/cp-metrics/0.log" Jan 26 11:09:33 crc kubenswrapper[5003]: I0126 11:09:33.046265 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/cp-reloader/0.log" Jan 26 11:09:33 crc kubenswrapper[5003]: I0126 11:09:33.060477 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/controller/0.log" Jan 26 11:09:33 crc kubenswrapper[5003]: I0126 11:09:33.246995 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/frr-metrics/0.log" Jan 26 11:09:33 crc kubenswrapper[5003]: I0126 11:09:33.264542 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/kube-rbac-proxy-frr/0.log" Jan 26 11:09:33 crc kubenswrapper[5003]: I0126 11:09:33.289615 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/kube-rbac-proxy/0.log" Jan 26 11:09:33 crc kubenswrapper[5003]: I0126 11:09:33.436935 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/reloader/0.log" Jan 26 11:09:33 crc kubenswrapper[5003]: I0126 11:09:33.554715 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-75hzw_b36ce522-21fc-49c5-a4ff-6a6680060a85/frr-k8s-webhook-server/0.log" Jan 26 11:09:33 crc kubenswrapper[5003]: I0126 11:09:33.708174 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/frr/0.log" Jan 26 11:09:33 crc kubenswrapper[5003]: I0126 11:09:33.716335 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-7dbc7b9d98-qtfvg_cbdc93bb-9371-4da1-843d-e5ec38ca21fd/manager/0.log" Jan 26 11:09:33 crc kubenswrapper[5003]: I0126 11:09:33.938327 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-bfmvk_104f69d2-ed9b-4607-82f2-649871e3c881/kube-rbac-proxy/0.log" Jan 26 11:09:33 crc kubenswrapper[5003]: I0126 11:09:33.946737 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6ff7ff8d4c-slczq_69d7a94b-db77-4b43-8fce-9ab7a114d24c/webhook-server/0.log" Jan 26 11:09:34 crc kubenswrapper[5003]: I0126 11:09:34.001114 5003 scope.go:117] "RemoveContainer" containerID="fd493fb6bf7d26d1384d6055009fa9b6eff08b01fe28831769b5174d9d440aa2" Jan 26 11:09:34 crc kubenswrapper[5003]: E0126 11:09:34.001349 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m84kp_openshift-machine-config-operator(c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" Jan 26 11:09:34 crc kubenswrapper[5003]: I0126 11:09:34.098761 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-bfmvk_104f69d2-ed9b-4607-82f2-649871e3c881/speaker/0.log" Jan 26 11:09:41 crc kubenswrapper[5003]: I0126 11:09:41.486738 5003 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/redhat-marketplace-7qnqx"] Jan 26 11:09:41 crc kubenswrapper[5003]: E0126 11:09:41.487328 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="450eb0d6-3b97-42aa-ae3f-99163c738868" containerName="extract-utilities" Jan 26 11:09:41 crc kubenswrapper[5003]: I0126 11:09:41.487343 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="450eb0d6-3b97-42aa-ae3f-99163c738868" containerName="extract-utilities" Jan 26 11:09:41 crc kubenswrapper[5003]: E0126 11:09:41.487356 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="450eb0d6-3b97-42aa-ae3f-99163c738868" containerName="extract-content" Jan 26 11:09:41 crc kubenswrapper[5003]: I0126 11:09:41.487363 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="450eb0d6-3b97-42aa-ae3f-99163c738868" containerName="extract-content" Jan 26 11:09:41 crc kubenswrapper[5003]: E0126 11:09:41.487373 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="450eb0d6-3b97-42aa-ae3f-99163c738868" containerName="registry-server" Jan 26 11:09:41 crc kubenswrapper[5003]: I0126 11:09:41.487381 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="450eb0d6-3b97-42aa-ae3f-99163c738868" containerName="registry-server" Jan 26 11:09:41 crc kubenswrapper[5003]: I0126 11:09:41.487487 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="450eb0d6-3b97-42aa-ae3f-99163c738868" containerName="registry-server" Jan 26 11:09:41 crc kubenswrapper[5003]: I0126 11:09:41.488409 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7qnqx" Jan 26 11:09:41 crc kubenswrapper[5003]: I0126 11:09:41.505998 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7qnqx"] Jan 26 11:09:41 crc kubenswrapper[5003]: I0126 11:09:41.623622 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82z6c\" (UniqueName: \"kubernetes.io/projected/67f8821f-6b1c-47ee-ae95-b8add514d9cf-kube-api-access-82z6c\") pod \"redhat-marketplace-7qnqx\" (UID: \"67f8821f-6b1c-47ee-ae95-b8add514d9cf\") " pod="openshift-marketplace/redhat-marketplace-7qnqx" Jan 26 11:09:41 crc kubenswrapper[5003]: I0126 11:09:41.623967 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67f8821f-6b1c-47ee-ae95-b8add514d9cf-utilities\") pod \"redhat-marketplace-7qnqx\" (UID: \"67f8821f-6b1c-47ee-ae95-b8add514d9cf\") " pod="openshift-marketplace/redhat-marketplace-7qnqx" Jan 26 11:09:41 crc kubenswrapper[5003]: I0126 11:09:41.624018 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67f8821f-6b1c-47ee-ae95-b8add514d9cf-catalog-content\") pod \"redhat-marketplace-7qnqx\" (UID: \"67f8821f-6b1c-47ee-ae95-b8add514d9cf\") " pod="openshift-marketplace/redhat-marketplace-7qnqx" Jan 26 11:09:41 crc kubenswrapper[5003]: I0126 11:09:41.724800 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67f8821f-6b1c-47ee-ae95-b8add514d9cf-utilities\") pod \"redhat-marketplace-7qnqx\" (UID: \"67f8821f-6b1c-47ee-ae95-b8add514d9cf\") " pod="openshift-marketplace/redhat-marketplace-7qnqx" Jan 26 11:09:41 crc kubenswrapper[5003]: I0126 11:09:41.724862 5003 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67f8821f-6b1c-47ee-ae95-b8add514d9cf-catalog-content\") pod \"redhat-marketplace-7qnqx\" (UID: \"67f8821f-6b1c-47ee-ae95-b8add514d9cf\") " pod="openshift-marketplace/redhat-marketplace-7qnqx" Jan 26 11:09:41 crc kubenswrapper[5003]: I0126 11:09:41.724928 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82z6c\" (UniqueName: \"kubernetes.io/projected/67f8821f-6b1c-47ee-ae95-b8add514d9cf-kube-api-access-82z6c\") pod \"redhat-marketplace-7qnqx\" (UID: \"67f8821f-6b1c-47ee-ae95-b8add514d9cf\") " pod="openshift-marketplace/redhat-marketplace-7qnqx" Jan 26 11:09:41 crc kubenswrapper[5003]: I0126 11:09:41.725562 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67f8821f-6b1c-47ee-ae95-b8add514d9cf-utilities\") pod \"redhat-marketplace-7qnqx\" (UID: \"67f8821f-6b1c-47ee-ae95-b8add514d9cf\") " pod="openshift-marketplace/redhat-marketplace-7qnqx" Jan 26 11:09:41 crc kubenswrapper[5003]: I0126 11:09:41.725628 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67f8821f-6b1c-47ee-ae95-b8add514d9cf-catalog-content\") pod \"redhat-marketplace-7qnqx\" (UID: \"67f8821f-6b1c-47ee-ae95-b8add514d9cf\") " pod="openshift-marketplace/redhat-marketplace-7qnqx" Jan 26 11:09:41 crc kubenswrapper[5003]: I0126 11:09:41.746609 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82z6c\" (UniqueName: \"kubernetes.io/projected/67f8821f-6b1c-47ee-ae95-b8add514d9cf-kube-api-access-82z6c\") pod \"redhat-marketplace-7qnqx\" (UID: \"67f8821f-6b1c-47ee-ae95-b8add514d9cf\") " pod="openshift-marketplace/redhat-marketplace-7qnqx" Jan 26 11:09:41 crc kubenswrapper[5003]: I0126 11:09:41.808877 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7qnqx" Jan 26 11:09:42 crc kubenswrapper[5003]: I0126 11:09:42.250621 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7qnqx"] Jan 26 11:09:42 crc kubenswrapper[5003]: I0126 11:09:42.487378 5003 generic.go:334] "Generic (PLEG): container finished" podID="67f8821f-6b1c-47ee-ae95-b8add514d9cf" containerID="6347fd548f317617fdf3ec3dd986565802b814813746a9a50765236b9a9f5202" exitCode=0 Jan 26 11:09:42 crc kubenswrapper[5003]: I0126 11:09:42.487541 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7qnqx" event={"ID":"67f8821f-6b1c-47ee-ae95-b8add514d9cf","Type":"ContainerDied","Data":"6347fd548f317617fdf3ec3dd986565802b814813746a9a50765236b9a9f5202"} Jan 26 11:09:42 crc kubenswrapper[5003]: I0126 11:09:42.488455 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7qnqx" event={"ID":"67f8821f-6b1c-47ee-ae95-b8add514d9cf","Type":"ContainerStarted","Data":"89370dbb47b1e08b2ad7d366ab6af1814acf45c6351c7e10ace0791331a25235"} Jan 26 11:09:43 crc kubenswrapper[5003]: I0126 11:09:43.496204 5003 generic.go:334] "Generic (PLEG): container finished" podID="67f8821f-6b1c-47ee-ae95-b8add514d9cf" containerID="6934363bfdfbc1baced78b37ee0724455af5887f0dcffa74fb3fcf92caa7c29b" exitCode=0 Jan 26 11:09:43 crc kubenswrapper[5003]: I0126 11:09:43.496259 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7qnqx" event={"ID":"67f8821f-6b1c-47ee-ae95-b8add514d9cf","Type":"ContainerDied","Data":"6934363bfdfbc1baced78b37ee0724455af5887f0dcffa74fb3fcf92caa7c29b"} Jan 26 11:09:44 crc kubenswrapper[5003]: I0126 11:09:44.504414 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7qnqx" event={"ID":"67f8821f-6b1c-47ee-ae95-b8add514d9cf","Type":"ContainerStarted","Data":"53e44a0817b6b0d0512bf3aea691dbd4b49d58817b5597bdc64c394b724e4d9d"} Jan 26 11:09:44 crc kubenswrapper[5003]: I0126 11:09:44.523413 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7qnqx" podStartSLOduration=2.108280733 podStartE2EDuration="3.523394653s" podCreationTimestamp="2026-01-26 11:09:41 +0000 UTC" firstStartedPulling="2026-01-26 11:09:42.489342548 +0000 UTC m=+1598.030568119" lastFinishedPulling="2026-01-26 11:09:43.904456478 +0000 UTC m=+1599.445682039" observedRunningTime="2026-01-26 11:09:44.521668993 +0000 UTC m=+1600.062894554" watchObservedRunningTime="2026-01-26 11:09:44.523394653 +0000 UTC m=+1600.064620214" Jan 26 11:09:46 crc kubenswrapper[5003]: I0126 11:09:46.001315 5003 scope.go:117] "RemoveContainer" containerID="fd493fb6bf7d26d1384d6055009fa9b6eff08b01fe28831769b5174d9d440aa2" Jan 26 11:09:46 crc kubenswrapper[5003]: E0126 11:09:46.001574 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m84kp_openshift-machine-config-operator(c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" Jan 26 11:09:51 crc kubenswrapper[5003]: I0126 11:09:51.809407 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7qnqx" Jan 26 11:09:51 
crc kubenswrapper[5003]: I0126 11:09:51.809990 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7qnqx" Jan 26 11:09:51 crc kubenswrapper[5003]: I0126 11:09:51.860299 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7qnqx" Jan 26 11:09:52 crc kubenswrapper[5003]: I0126 11:09:52.595980 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7qnqx" Jan 26 11:09:52 crc kubenswrapper[5003]: I0126 11:09:52.636568 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7qnqx"] Jan 26 11:09:54 crc kubenswrapper[5003]: I0126 11:09:54.569079 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7qnqx" podUID="67f8821f-6b1c-47ee-ae95-b8add514d9cf" containerName="registry-server" containerID="cri-o://53e44a0817b6b0d0512bf3aea691dbd4b49d58817b5597bdc64c394b724e4d9d" gracePeriod=2 Jan 26 11:09:55 crc kubenswrapper[5003]: I0126 11:09:55.581259 5003 generic.go:334] "Generic (PLEG): container finished" podID="67f8821f-6b1c-47ee-ae95-b8add514d9cf" containerID="53e44a0817b6b0d0512bf3aea691dbd4b49d58817b5597bdc64c394b724e4d9d" exitCode=0 Jan 26 11:09:55 crc kubenswrapper[5003]: I0126 11:09:55.581324 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7qnqx" event={"ID":"67f8821f-6b1c-47ee-ae95-b8add514d9cf","Type":"ContainerDied","Data":"53e44a0817b6b0d0512bf3aea691dbd4b49d58817b5597bdc64c394b724e4d9d"} Jan 26 11:09:56 crc kubenswrapper[5003]: I0126 11:09:56.066889 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7qnqx" Jan 26 11:09:56 crc kubenswrapper[5003]: I0126 11:09:56.128231 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67f8821f-6b1c-47ee-ae95-b8add514d9cf-catalog-content\") pod \"67f8821f-6b1c-47ee-ae95-b8add514d9cf\" (UID: \"67f8821f-6b1c-47ee-ae95-b8add514d9cf\") " Jan 26 11:09:56 crc kubenswrapper[5003]: I0126 11:09:56.151838 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67f8821f-6b1c-47ee-ae95-b8add514d9cf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "67f8821f-6b1c-47ee-ae95-b8add514d9cf" (UID: "67f8821f-6b1c-47ee-ae95-b8add514d9cf"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:09:56 crc kubenswrapper[5003]: I0126 11:09:56.229316 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-82z6c\" (UniqueName: \"kubernetes.io/projected/67f8821f-6b1c-47ee-ae95-b8add514d9cf-kube-api-access-82z6c\") pod \"67f8821f-6b1c-47ee-ae95-b8add514d9cf\" (UID: \"67f8821f-6b1c-47ee-ae95-b8add514d9cf\") " Jan 26 11:09:56 crc kubenswrapper[5003]: I0126 11:09:56.229546 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67f8821f-6b1c-47ee-ae95-b8add514d9cf-utilities\") pod \"67f8821f-6b1c-47ee-ae95-b8add514d9cf\" (UID: \"67f8821f-6b1c-47ee-ae95-b8add514d9cf\") " Jan 26 11:09:56 crc kubenswrapper[5003]: I0126 11:09:56.229819 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67f8821f-6b1c-47ee-ae95-b8add514d9cf-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 11:09:56 crc kubenswrapper[5003]: I0126 11:09:56.231908 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67f8821f-6b1c-47ee-ae95-b8add514d9cf-utilities" (OuterVolumeSpecName: "utilities") pod "67f8821f-6b1c-47ee-ae95-b8add514d9cf" (UID: "67f8821f-6b1c-47ee-ae95-b8add514d9cf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:09:56 crc kubenswrapper[5003]: I0126 11:09:56.238738 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67f8821f-6b1c-47ee-ae95-b8add514d9cf-kube-api-access-82z6c" (OuterVolumeSpecName: "kube-api-access-82z6c") pod "67f8821f-6b1c-47ee-ae95-b8add514d9cf" (UID: "67f8821f-6b1c-47ee-ae95-b8add514d9cf"). InnerVolumeSpecName "kube-api-access-82z6c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:09:56 crc kubenswrapper[5003]: I0126 11:09:56.331360 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67f8821f-6b1c-47ee-ae95-b8add514d9cf-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 11:09:56 crc kubenswrapper[5003]: I0126 11:09:56.331411 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-82z6c\" (UniqueName: \"kubernetes.io/projected/67f8821f-6b1c-47ee-ae95-b8add514d9cf-kube-api-access-82z6c\") on node \"crc\" DevicePath \"\"" Jan 26 11:09:56 crc kubenswrapper[5003]: I0126 11:09:56.590042 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7qnqx" event={"ID":"67f8821f-6b1c-47ee-ae95-b8add514d9cf","Type":"ContainerDied","Data":"89370dbb47b1e08b2ad7d366ab6af1814acf45c6351c7e10ace0791331a25235"} Jan 26 11:09:56 crc kubenswrapper[5003]: I0126 11:09:56.590101 5003 scope.go:117] "RemoveContainer" containerID="53e44a0817b6b0d0512bf3aea691dbd4b49d58817b5597bdc64c394b724e4d9d" Jan 26 11:09:56 crc kubenswrapper[5003]: I0126 11:09:56.590258 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7qnqx" Jan 26 11:09:56 crc kubenswrapper[5003]: I0126 11:09:56.611404 5003 scope.go:117] "RemoveContainer" containerID="6934363bfdfbc1baced78b37ee0724455af5887f0dcffa74fb3fcf92caa7c29b" Jan 26 11:09:56 crc kubenswrapper[5003]: I0126 11:09:56.627267 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7qnqx"] Jan 26 11:09:56 crc kubenswrapper[5003]: I0126 11:09:56.637248 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7qnqx"] Jan 26 11:09:56 crc kubenswrapper[5003]: I0126 11:09:56.643413 5003 scope.go:117] "RemoveContainer" containerID="6347fd548f317617fdf3ec3dd986565802b814813746a9a50765236b9a9f5202" Jan 26 11:09:57 crc kubenswrapper[5003]: I0126 11:09:57.022356 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67f8821f-6b1c-47ee-ae95-b8add514d9cf" path="/var/lib/kubelet/pods/67f8821f-6b1c-47ee-ae95-b8add514d9cf/volumes" Jan 26 11:09:59 crc kubenswrapper[5003]: I0126 11:09:59.002085 5003 scope.go:117] "RemoveContainer" containerID="fd493fb6bf7d26d1384d6055009fa9b6eff08b01fe28831769b5174d9d440aa2" Jan 26 11:09:59 crc kubenswrapper[5003]: E0126 11:09:59.002412 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m84kp_openshift-machine-config-operator(c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" Jan 26 11:10:00 crc kubenswrapper[5003]: I0126 11:10:00.558825 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q_57d5e63e-8e5c-4a9f-ac8b-175ca05409ba/util/0.log" Jan 26 11:10:00 crc kubenswrapper[5003]: I0126 11:10:00.763306 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q_57d5e63e-8e5c-4a9f-ac8b-175ca05409ba/pull/0.log" Jan 26 11:10:00 crc kubenswrapper[5003]: I0126 11:10:00.773337 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q_57d5e63e-8e5c-4a9f-ac8b-175ca05409ba/util/0.log" Jan 26 11:10:00 crc kubenswrapper[5003]: I0126 11:10:00.790255 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q_57d5e63e-8e5c-4a9f-ac8b-175ca05409ba/pull/0.log" Jan 26 11:10:00 crc kubenswrapper[5003]: I0126 11:10:00.960436 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q_57d5e63e-8e5c-4a9f-ac8b-175ca05409ba/extract/0.log" Jan 26 11:10:00 crc kubenswrapper[5003]: I0126 11:10:00.963353 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q_57d5e63e-8e5c-4a9f-ac8b-175ca05409ba/pull/0.log" Jan 26 11:10:00 crc kubenswrapper[5003]: I0126 11:10:00.977890 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q_57d5e63e-8e5c-4a9f-ac8b-175ca05409ba/util/0.log" Jan 26 11:10:01 
crc kubenswrapper[5003]: I0126 11:10:01.138889 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-6lx88_2252190e-b3ad-44ff-9973-881f9e111836/extract-utilities/0.log" Jan 26 11:10:01 crc kubenswrapper[5003]: I0126 11:10:01.343270 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-6lx88_2252190e-b3ad-44ff-9973-881f9e111836/extract-utilities/0.log" Jan 26 11:10:01 crc kubenswrapper[5003]: I0126 11:10:01.356908 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-6lx88_2252190e-b3ad-44ff-9973-881f9e111836/extract-content/0.log" Jan 26 11:10:01 crc kubenswrapper[5003]: I0126 11:10:01.365395 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-6lx88_2252190e-b3ad-44ff-9973-881f9e111836/extract-content/0.log" Jan 26 11:10:01 crc kubenswrapper[5003]: I0126 11:10:01.568323 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-6lx88_2252190e-b3ad-44ff-9973-881f9e111836/extract-content/0.log" Jan 26 11:10:01 crc kubenswrapper[5003]: I0126 11:10:01.570660 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-6lx88_2252190e-b3ad-44ff-9973-881f9e111836/extract-utilities/0.log" Jan 26 11:10:01 crc kubenswrapper[5003]: I0126 11:10:01.772719 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6hlj9_dc3bde44-36d1-42a3-9dc5-3d8205c1ccec/extract-utilities/0.log" Jan 26 11:10:01 crc kubenswrapper[5003]: I0126 11:10:01.805048 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-6lx88_2252190e-b3ad-44ff-9973-881f9e111836/registry-server/0.log" Jan 26 11:10:02 crc kubenswrapper[5003]: I0126 11:10:02.029312 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6hlj9_dc3bde44-36d1-42a3-9dc5-3d8205c1ccec/extract-content/0.log" Jan 26 11:10:02 crc kubenswrapper[5003]: I0126 11:10:02.038028 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6hlj9_dc3bde44-36d1-42a3-9dc5-3d8205c1ccec/extract-content/0.log" Jan 26 11:10:02 crc kubenswrapper[5003]: I0126 11:10:02.050268 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6hlj9_dc3bde44-36d1-42a3-9dc5-3d8205c1ccec/extract-utilities/0.log" Jan 26 11:10:02 crc kubenswrapper[5003]: I0126 11:10:02.181074 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6hlj9_dc3bde44-36d1-42a3-9dc5-3d8205c1ccec/extract-utilities/0.log" Jan 26 11:10:02 crc kubenswrapper[5003]: I0126 11:10:02.185629 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6hlj9_dc3bde44-36d1-42a3-9dc5-3d8205c1ccec/extract-content/0.log" Jan 26 11:10:02 crc kubenswrapper[5003]: I0126 11:10:02.366776 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6hlj9_dc3bde44-36d1-42a3-9dc5-3d8205c1ccec/registry-server/0.log" Jan 26 11:10:02 crc kubenswrapper[5003]: I0126 11:10:02.436315 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-ljw7m_4f101492-8469-482f-a258-7a3a4e9fade0/marketplace-operator/0.log" Jan 26 11:10:02 
crc kubenswrapper[5003]: I0126 11:10:02.540734 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-lrt87_bfe9e96c-fa33-4e01-beec-4e038b6ba28c/extract-utilities/0.log" Jan 26 11:10:02 crc kubenswrapper[5003]: I0126 11:10:02.703785 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-lrt87_bfe9e96c-fa33-4e01-beec-4e038b6ba28c/extract-content/0.log" Jan 26 11:10:02 crc kubenswrapper[5003]: I0126 11:10:02.704476 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-lrt87_bfe9e96c-fa33-4e01-beec-4e038b6ba28c/extract-content/0.log" Jan 26 11:10:02 crc kubenswrapper[5003]: I0126 11:10:02.710592 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-lrt87_bfe9e96c-fa33-4e01-beec-4e038b6ba28c/extract-utilities/0.log" Jan 26 11:10:03 crc kubenswrapper[5003]: I0126 11:10:03.066689 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-lrt87_bfe9e96c-fa33-4e01-beec-4e038b6ba28c/extract-content/0.log" Jan 26 11:10:03 crc kubenswrapper[5003]: I0126 11:10:03.085922 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-lrt87_bfe9e96c-fa33-4e01-beec-4e038b6ba28c/extract-utilities/0.log" Jan 26 11:10:03 crc kubenswrapper[5003]: I0126 11:10:03.154702 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-lrt87_bfe9e96c-fa33-4e01-beec-4e038b6ba28c/registry-server/0.log" Jan 26 11:10:03 crc kubenswrapper[5003]: I0126 11:10:03.311621 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s9pdl_e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92/extract-utilities/0.log" Jan 26 11:10:03 crc kubenswrapper[5003]: I0126 11:10:03.472265 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s9pdl_e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92/extract-utilities/0.log" Jan 26 11:10:03 crc kubenswrapper[5003]: I0126 11:10:03.489831 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s9pdl_e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92/extract-content/0.log" Jan 26 11:10:03 crc kubenswrapper[5003]: I0126 11:10:03.559586 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s9pdl_e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92/extract-content/0.log" Jan 26 11:10:03 crc kubenswrapper[5003]: I0126 11:10:03.714741 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s9pdl_e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92/extract-utilities/0.log" Jan 26 11:10:03 crc kubenswrapper[5003]: I0126 11:10:03.765863 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s9pdl_e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92/extract-content/0.log" Jan 26 11:10:04 crc kubenswrapper[5003]: I0126 11:10:04.046561 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s9pdl_e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92/registry-server/0.log" Jan 26 11:10:07 crc kubenswrapper[5003]: I0126 11:10:07.842892 5003 scope.go:117] "RemoveContainer" containerID="5dd9e7734cb77fc5bc4446e59d4ca97b938850a3b28ef7eaa13c7b81dfc7111a" Jan 26 11:10:07 crc kubenswrapper[5003]: I0126 11:10:07.869541 5003 scope.go:117] "RemoveContainer" 
containerID="023f0eaec1dd53f44fd127cf2e768f7b234b5705d970e992c494e89878e8895b" Jan 26 11:10:07 crc kubenswrapper[5003]: I0126 11:10:07.890199 5003 scope.go:117] "RemoveContainer" containerID="6447e5e50432c5444f94d13c04a931686ff50ffeb66e6656939310c7f6336729" Jan 26 11:10:14 crc kubenswrapper[5003]: I0126 11:10:14.002019 5003 scope.go:117] "RemoveContainer" containerID="fd493fb6bf7d26d1384d6055009fa9b6eff08b01fe28831769b5174d9d440aa2" Jan 26 11:10:14 crc kubenswrapper[5003]: E0126 11:10:14.003962 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m84kp_openshift-machine-config-operator(c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" Jan 26 11:10:25 crc kubenswrapper[5003]: I0126 11:10:25.006318 5003 scope.go:117] "RemoveContainer" containerID="fd493fb6bf7d26d1384d6055009fa9b6eff08b01fe28831769b5174d9d440aa2" Jan 26 11:10:25 crc kubenswrapper[5003]: E0126 11:10:25.009341 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m84kp_openshift-machine-config-operator(c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" Jan 26 11:10:38 crc kubenswrapper[5003]: I0126 11:10:38.001975 5003 scope.go:117] "RemoveContainer" containerID="fd493fb6bf7d26d1384d6055009fa9b6eff08b01fe28831769b5174d9d440aa2" Jan 26 11:10:38 crc kubenswrapper[5003]: E0126 11:10:38.003113 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m84kp_openshift-machine-config-operator(c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" Jan 26 11:10:52 crc kubenswrapper[5003]: I0126 11:10:52.001650 5003 scope.go:117] "RemoveContainer" containerID="fd493fb6bf7d26d1384d6055009fa9b6eff08b01fe28831769b5174d9d440aa2" Jan 26 11:10:52 crc kubenswrapper[5003]: E0126 11:10:52.002241 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m84kp_openshift-machine-config-operator(c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" Jan 26 11:11:05 crc kubenswrapper[5003]: I0126 11:11:05.011326 5003 scope.go:117] "RemoveContainer" containerID="fd493fb6bf7d26d1384d6055009fa9b6eff08b01fe28831769b5174d9d440aa2" Jan 26 11:11:05 crc kubenswrapper[5003]: E0126 11:11:05.012552 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m84kp_openshift-machine-config-operator(c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" Jan 26 11:11:07 crc kubenswrapper[5003]: I0126 11:11:07.992657 5003 scope.go:117] "RemoveContainer" containerID="a3e9bb68fd5364ac306256f0901236db32ecc139a99006907d1a35747931de7d" Jan 26 11:11:08 crc kubenswrapper[5003]: I0126 11:11:08.029949 5003 scope.go:117] "RemoveContainer" containerID="5c9caba665ad8644e75784ffcbd69d930eacb480b36e91cfd365eb7c9c96ae6f" Jan 26 11:11:08 crc kubenswrapper[5003]: I0126 11:11:08.053444 5003 scope.go:117] "RemoveContainer" containerID="bff9c806ed7cdb75c1f4838b9a094c93dc75ade4c30c0b29d24a1cf2577a3e99" Jan 26 11:11:08 crc kubenswrapper[5003]: I0126 11:11:08.070069 5003 scope.go:117] "RemoveContainer" containerID="4afe6b66ed2fd13ad641a9847b636ca80bb77c44eeae3e5205cb9852b91e1c24" Jan 26 11:11:08 crc kubenswrapper[5003]: I0126 11:11:08.090256 5003 scope.go:117] "RemoveContainer" containerID="b415c6a54f47f05402c3feef392d112113942b22d937deaa338e1bd319c82b54" Jan 26 11:11:08 crc kubenswrapper[5003]: I0126 11:11:08.108876 5003 scope.go:117] "RemoveContainer" containerID="3d339f409ffcb5325ad919da7c5d4cbda84d02d5b1e4439f03e3c1143cf7fe72" Jan 26 11:11:08 crc kubenswrapper[5003]: I0126 11:11:08.128185 5003 scope.go:117] "RemoveContainer" containerID="e34cb9c5f822beead5263fd88e158f66d3b44e29ea1a45b39948e974da9d3afa" Jan 26 11:11:08 crc kubenswrapper[5003]: I0126 11:11:08.144541 5003 scope.go:117] "RemoveContainer" containerID="23f16ba6e5ba6a363d0ee9ae965d1b10aa7a42784a6480cb45492d47033a7ba8" Jan 26 11:11:08 crc kubenswrapper[5003]: I0126 11:11:08.161101 5003 scope.go:117] "RemoveContainer" containerID="f646834e6851b96fb382e1ef2015754dc2c7b2787e5b226395b64e3b11d176a1" Jan 26 11:11:08 crc kubenswrapper[5003]: I0126 11:11:08.175412 5003 scope.go:117] "RemoveContainer" containerID="97a7f4949dee21f7325aece319ca46ed43183cb5ea18e9253c3f974b31284e87" Jan 26 11:11:08 crc kubenswrapper[5003]: I0126 11:11:08.191535 5003 scope.go:117] "RemoveContainer" containerID="eab743ecc90895a84bc0ce0d7cc79df23a1ceb78f36452966772b7ab9eab3467" Jan 26 11:11:08 crc kubenswrapper[5003]: I0126 11:11:08.207312 5003 scope.go:117] "RemoveContainer" containerID="44eb95174a9971d82c75beccef4e953696637bea5a347f6eaf3f257661d25582" Jan 26 11:11:08 crc kubenswrapper[5003]: I0126 11:11:08.225403 5003 scope.go:117] "RemoveContainer" containerID="f9a98aa39ad63d3e3cd260454a7a9de04082531369d39733e70305ac416cea9e" Jan 26 11:11:08 crc kubenswrapper[5003]: I0126 11:11:08.244970 5003 scope.go:117] "RemoveContainer" containerID="527c335c96bbde9f354d78b445883d59eef8e57fa945fe3616c7ffa9bbd6770c" Jan 26 11:11:08 crc kubenswrapper[5003]: I0126 11:11:08.260604 5003 scope.go:117] "RemoveContainer" containerID="c91ab6152d5ea9d28ccd73b6974a0f4d3883709c99852ba25bec0db8c9a6e5ac" Jan 26 11:11:08 crc kubenswrapper[5003]: I0126 11:11:08.276302 5003 scope.go:117] "RemoveContainer" containerID="def8593f5dc4e5b190a67ba07275990c971b8f57a48205aeed28d7799359a1cd" Jan 26 11:11:08 crc kubenswrapper[5003]: I0126 11:11:08.298478 5003 scope.go:117] "RemoveContainer" containerID="1d28eb610f8b253b7a6064cd6c60e01513782dca50d398abb69cf5666587bc32" Jan 26 11:11:08 crc kubenswrapper[5003]: I0126 11:11:08.315256 5003 scope.go:117] "RemoveContainer" containerID="b07a59c52b0cbdde0c15d799dc68288f4ee14a18c067573b81f96b3508b94432" Jan 26 11:11:20 crc kubenswrapper[5003]: I0126 11:11:20.002186 5003 scope.go:117] "RemoveContainer" containerID="fd493fb6bf7d26d1384d6055009fa9b6eff08b01fe28831769b5174d9d440aa2" Jan 26 11:11:20 crc 
kubenswrapper[5003]: E0126 11:11:20.002990 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m84kp_openshift-machine-config-operator(c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" Jan 26 11:11:24 crc kubenswrapper[5003]: I0126 11:11:24.249100 5003 generic.go:334] "Generic (PLEG): container finished" podID="af0ae9dc-aef2-45db-9f30-87494fe64171" containerID="8cdcf867a29fb87d2e116e284bcbe874719fe3a4f1a4e64bfeb40d75eb072071" exitCode=0 Jan 26 11:11:24 crc kubenswrapper[5003]: I0126 11:11:24.249177 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dczvs/must-gather-q8zwg" event={"ID":"af0ae9dc-aef2-45db-9f30-87494fe64171","Type":"ContainerDied","Data":"8cdcf867a29fb87d2e116e284bcbe874719fe3a4f1a4e64bfeb40d75eb072071"} Jan 26 11:11:24 crc kubenswrapper[5003]: I0126 11:11:24.250105 5003 scope.go:117] "RemoveContainer" containerID="8cdcf867a29fb87d2e116e284bcbe874719fe3a4f1a4e64bfeb40d75eb072071" Jan 26 11:11:24 crc kubenswrapper[5003]: I0126 11:11:24.544494 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-dczvs_must-gather-q8zwg_af0ae9dc-aef2-45db-9f30-87494fe64171/gather/0.log" Jan 26 11:11:31 crc kubenswrapper[5003]: I0126 11:11:31.578126 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-dczvs/must-gather-q8zwg"] Jan 26 11:11:31 crc kubenswrapper[5003]: I0126 11:11:31.578829 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-dczvs/must-gather-q8zwg" podUID="af0ae9dc-aef2-45db-9f30-87494fe64171" containerName="copy" containerID="cri-o://eeb6af05329d891a6fccd6c2595619a805a17f20606a4d2f215c7e6b72a78fe4" gracePeriod=2 Jan 26 11:11:31 crc kubenswrapper[5003]: I0126 11:11:31.584535 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-dczvs/must-gather-q8zwg"] Jan 26 11:11:31 crc kubenswrapper[5003]: I0126 11:11:31.911505 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-dczvs_must-gather-q8zwg_af0ae9dc-aef2-45db-9f30-87494fe64171/copy/0.log" Jan 26 11:11:31 crc kubenswrapper[5003]: I0126 11:11:31.912332 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-dczvs/must-gather-q8zwg" Jan 26 11:11:31 crc kubenswrapper[5003]: I0126 11:11:31.945144 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h65s2\" (UniqueName: \"kubernetes.io/projected/af0ae9dc-aef2-45db-9f30-87494fe64171-kube-api-access-h65s2\") pod \"af0ae9dc-aef2-45db-9f30-87494fe64171\" (UID: \"af0ae9dc-aef2-45db-9f30-87494fe64171\") " Jan 26 11:11:31 crc kubenswrapper[5003]: I0126 11:11:31.945225 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/af0ae9dc-aef2-45db-9f30-87494fe64171-must-gather-output\") pod \"af0ae9dc-aef2-45db-9f30-87494fe64171\" (UID: \"af0ae9dc-aef2-45db-9f30-87494fe64171\") " Jan 26 11:11:31 crc kubenswrapper[5003]: I0126 11:11:31.951532 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af0ae9dc-aef2-45db-9f30-87494fe64171-kube-api-access-h65s2" (OuterVolumeSpecName: "kube-api-access-h65s2") pod "af0ae9dc-aef2-45db-9f30-87494fe64171" (UID: "af0ae9dc-aef2-45db-9f30-87494fe64171"). InnerVolumeSpecName "kube-api-access-h65s2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:11:32 crc kubenswrapper[5003]: I0126 11:11:32.010317 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af0ae9dc-aef2-45db-9f30-87494fe64171-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "af0ae9dc-aef2-45db-9f30-87494fe64171" (UID: "af0ae9dc-aef2-45db-9f30-87494fe64171"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:11:32 crc kubenswrapper[5003]: I0126 11:11:32.047940 5003 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/af0ae9dc-aef2-45db-9f30-87494fe64171-must-gather-output\") on node \"crc\" DevicePath \"\"" Jan 26 11:11:32 crc kubenswrapper[5003]: I0126 11:11:32.047978 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h65s2\" (UniqueName: \"kubernetes.io/projected/af0ae9dc-aef2-45db-9f30-87494fe64171-kube-api-access-h65s2\") on node \"crc\" DevicePath \"\"" Jan 26 11:11:32 crc kubenswrapper[5003]: I0126 11:11:32.300222 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-dczvs_must-gather-q8zwg_af0ae9dc-aef2-45db-9f30-87494fe64171/copy/0.log" Jan 26 11:11:32 crc kubenswrapper[5003]: I0126 11:11:32.300859 5003 generic.go:334] "Generic (PLEG): container finished" podID="af0ae9dc-aef2-45db-9f30-87494fe64171" containerID="eeb6af05329d891a6fccd6c2595619a805a17f20606a4d2f215c7e6b72a78fe4" exitCode=143 Jan 26 11:11:32 crc kubenswrapper[5003]: I0126 11:11:32.300923 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-dczvs/must-gather-q8zwg" Jan 26 11:11:32 crc kubenswrapper[5003]: I0126 11:11:32.300952 5003 scope.go:117] "RemoveContainer" containerID="eeb6af05329d891a6fccd6c2595619a805a17f20606a4d2f215c7e6b72a78fe4" Jan 26 11:11:32 crc kubenswrapper[5003]: I0126 11:11:32.319978 5003 scope.go:117] "RemoveContainer" containerID="8cdcf867a29fb87d2e116e284bcbe874719fe3a4f1a4e64bfeb40d75eb072071" Jan 26 11:11:32 crc kubenswrapper[5003]: I0126 11:11:32.384037 5003 scope.go:117] "RemoveContainer" containerID="eeb6af05329d891a6fccd6c2595619a805a17f20606a4d2f215c7e6b72a78fe4" Jan 26 11:11:32 crc kubenswrapper[5003]: E0126 11:11:32.384572 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eeb6af05329d891a6fccd6c2595619a805a17f20606a4d2f215c7e6b72a78fe4\": container with ID starting with eeb6af05329d891a6fccd6c2595619a805a17f20606a4d2f215c7e6b72a78fe4 not found: ID does not exist" containerID="eeb6af05329d891a6fccd6c2595619a805a17f20606a4d2f215c7e6b72a78fe4" Jan 26 11:11:32 crc kubenswrapper[5003]: I0126 11:11:32.384614 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eeb6af05329d891a6fccd6c2595619a805a17f20606a4d2f215c7e6b72a78fe4"} err="failed to get container status \"eeb6af05329d891a6fccd6c2595619a805a17f20606a4d2f215c7e6b72a78fe4\": rpc error: code = NotFound desc = could not find container \"eeb6af05329d891a6fccd6c2595619a805a17f20606a4d2f215c7e6b72a78fe4\": container with ID starting with eeb6af05329d891a6fccd6c2595619a805a17f20606a4d2f215c7e6b72a78fe4 not found: ID does not exist" Jan 26 11:11:32 crc kubenswrapper[5003]: I0126 11:11:32.384637 5003 scope.go:117] "RemoveContainer" containerID="8cdcf867a29fb87d2e116e284bcbe874719fe3a4f1a4e64bfeb40d75eb072071" Jan 26 11:11:32 crc kubenswrapper[5003]: E0126 11:11:32.385044 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8cdcf867a29fb87d2e116e284bcbe874719fe3a4f1a4e64bfeb40d75eb072071\": container with ID starting with 8cdcf867a29fb87d2e116e284bcbe874719fe3a4f1a4e64bfeb40d75eb072071 not found: ID does not exist" containerID="8cdcf867a29fb87d2e116e284bcbe874719fe3a4f1a4e64bfeb40d75eb072071" Jan 26 11:11:32 crc kubenswrapper[5003]: I0126 11:11:32.385097 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cdcf867a29fb87d2e116e284bcbe874719fe3a4f1a4e64bfeb40d75eb072071"} err="failed to get container status \"8cdcf867a29fb87d2e116e284bcbe874719fe3a4f1a4e64bfeb40d75eb072071\": rpc error: code = NotFound desc = could not find container \"8cdcf867a29fb87d2e116e284bcbe874719fe3a4f1a4e64bfeb40d75eb072071\": container with ID starting with 8cdcf867a29fb87d2e116e284bcbe874719fe3a4f1a4e64bfeb40d75eb072071 not found: ID does not exist" Jan 26 11:11:33 crc kubenswrapper[5003]: I0126 11:11:33.016315 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af0ae9dc-aef2-45db-9f30-87494fe64171" path="/var/lib/kubelet/pods/af0ae9dc-aef2-45db-9f30-87494fe64171/volumes" Jan 26 11:11:34 crc kubenswrapper[5003]: I0126 11:11:34.001853 5003 scope.go:117] "RemoveContainer" containerID="fd493fb6bf7d26d1384d6055009fa9b6eff08b01fe28831769b5174d9d440aa2" Jan 26 11:11:34 crc kubenswrapper[5003]: E0126 11:11:34.002127 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 
5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m84kp_openshift-machine-config-operator(c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" Jan 26 11:11:47 crc kubenswrapper[5003]: I0126 11:11:47.001944 5003 scope.go:117] "RemoveContainer" containerID="fd493fb6bf7d26d1384d6055009fa9b6eff08b01fe28831769b5174d9d440aa2" Jan 26 11:11:47 crc kubenswrapper[5003]: E0126 11:11:47.003333 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m84kp_openshift-machine-config-operator(c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" Jan 26 11:12:01 crc kubenswrapper[5003]: I0126 11:12:01.002432 5003 scope.go:117] "RemoveContainer" containerID="fd493fb6bf7d26d1384d6055009fa9b6eff08b01fe28831769b5174d9d440aa2" Jan 26 11:12:01 crc kubenswrapper[5003]: E0126 11:12:01.003207 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m84kp_openshift-machine-config-operator(c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" Jan 26 11:12:08 crc kubenswrapper[5003]: I0126 11:12:08.375745 5003 scope.go:117] "RemoveContainer" containerID="fe6a82e6537f704100d86733c47045a0dc11ad52f91e05cc9d27c53228f474aa" Jan 26 11:12:08 crc kubenswrapper[5003]: I0126 11:12:08.395051 5003 scope.go:117] "RemoveContainer" containerID="2aeafc4ab81c00b82fba0ca8086655e2659ed60f1a7aba204706e01651f8217f" Jan 26 11:12:08 crc kubenswrapper[5003]: I0126 11:12:08.414742 5003 scope.go:117] "RemoveContainer" containerID="d3e4b89d816b719481fc57de309d3c349994a5562e0428530b96dddee0f18332" Jan 26 11:12:08 crc kubenswrapper[5003]: I0126 11:12:08.432225 5003 scope.go:117] "RemoveContainer" containerID="26970c88275d831b8cfc37137ac332f72c9c80205bd79693e2fb844ca850c5d4" Jan 26 11:12:08 crc kubenswrapper[5003]: I0126 11:12:08.468409 5003 scope.go:117] "RemoveContainer" containerID="78c305d992140bcfbcf7965e3cc7061e2a9c92d5781980c307adf30362dff748" Jan 26 11:12:08 crc kubenswrapper[5003]: I0126 11:12:08.484811 5003 scope.go:117] "RemoveContainer" containerID="eb1fe80ff8611d70f827cd9cf0c1c51537e7e074100f3683bf5601788cd0e021" Jan 26 11:12:08 crc kubenswrapper[5003]: I0126 11:12:08.498674 5003 scope.go:117] "RemoveContainer" containerID="6856586b6764d2d9ade5ac7f106ccb105b3cea21024fb4cd45cea64a433a433a" Jan 26 11:12:08 crc kubenswrapper[5003]: I0126 11:12:08.511759 5003 scope.go:117] "RemoveContainer" containerID="e9d4e2006f4906e90bf1c99cfc7b76c8096502c24667a7891f56f3e031c65e92" Jan 26 11:12:08 crc kubenswrapper[5003]: I0126 11:12:08.525198 5003 scope.go:117] "RemoveContainer" containerID="b5778b163f43e4199e61bf680d32df88f835c04806af665bb62481b12c58c529" Jan 26 11:12:08 crc kubenswrapper[5003]: I0126 11:12:08.541292 5003 scope.go:117] "RemoveContainer" containerID="837369ec91ac552adf85b9327cfdbe55939dba4ab49d8e1df1ab28f68db74b0a" Jan 26 11:12:08 crc kubenswrapper[5003]: I0126 11:12:08.555834 5003 scope.go:117] "RemoveContainer" 
containerID="9525c4f6af285b83022dc7872ad7d96e0258131dd1b1b3a54621fece4d73b356" Jan 26 11:12:08 crc kubenswrapper[5003]: I0126 11:12:08.571858 5003 scope.go:117] "RemoveContainer" containerID="20361e02fa6e5504119da17f3f4317a34a78ea4e3d97257903221bf6a3465487" Jan 26 11:12:08 crc kubenswrapper[5003]: I0126 11:12:08.585474 5003 scope.go:117] "RemoveContainer" containerID="f56b122c36e67f251c10dde9254f2924069036f367b8b889092f255fcdc60473" Jan 26 11:12:08 crc kubenswrapper[5003]: I0126 11:12:08.599582 5003 scope.go:117] "RemoveContainer" containerID="1892c5df0f97ddc761455fbae7b3bc00f433ac82c936df6b99e816502660346d" Jan 26 11:12:08 crc kubenswrapper[5003]: I0126 11:12:08.614470 5003 scope.go:117] "RemoveContainer" containerID="c58218c1c9329d527320c12af5203b6766d66444d7396a6621fc110a94bf364b" Jan 26 11:12:08 crc kubenswrapper[5003]: I0126 11:12:08.627771 5003 scope.go:117] "RemoveContainer" containerID="ce3bd2f7dc397b83e2c98a106abb9f0255350ec691bff08ca793eea8359bb86b" Jan 26 11:12:14 crc kubenswrapper[5003]: I0126 11:12:14.001488 5003 scope.go:117] "RemoveContainer" containerID="fd493fb6bf7d26d1384d6055009fa9b6eff08b01fe28831769b5174d9d440aa2" Jan 26 11:12:14 crc kubenswrapper[5003]: E0126 11:12:14.002346 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m84kp_openshift-machine-config-operator(c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" Jan 26 11:12:28 crc kubenswrapper[5003]: I0126 11:12:28.002101 5003 scope.go:117] "RemoveContainer" containerID="fd493fb6bf7d26d1384d6055009fa9b6eff08b01fe28831769b5174d9d440aa2" Jan 26 11:12:28 crc kubenswrapper[5003]: E0126 11:12:28.003015 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m84kp_openshift-machine-config-operator(c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" Jan 26 11:12:39 crc kubenswrapper[5003]: I0126 11:12:39.001779 5003 scope.go:117] "RemoveContainer" containerID="fd493fb6bf7d26d1384d6055009fa9b6eff08b01fe28831769b5174d9d440aa2" Jan 26 11:12:39 crc kubenswrapper[5003]: E0126 11:12:39.002401 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m84kp_openshift-machine-config-operator(c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" Jan 26 11:12:53 crc kubenswrapper[5003]: I0126 11:12:53.002229 5003 scope.go:117] "RemoveContainer" containerID="fd493fb6bf7d26d1384d6055009fa9b6eff08b01fe28831769b5174d9d440aa2" Jan 26 11:12:53 crc kubenswrapper[5003]: E0126 11:12:53.003012 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m84kp_openshift-machine-config-operator(c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" Jan 26 11:13:08 crc kubenswrapper[5003]: I0126 11:13:08.002261 5003 scope.go:117] "RemoveContainer" containerID="fd493fb6bf7d26d1384d6055009fa9b6eff08b01fe28831769b5174d9d440aa2" Jan 26 11:13:08 crc kubenswrapper[5003]: E0126 11:13:08.003067 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m84kp_openshift-machine-config-operator(c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" Jan 26 11:13:08 crc kubenswrapper[5003]: I0126 11:13:08.679802 5003 scope.go:117] "RemoveContainer" containerID="44b815891ac54b4652e96219b9c9b5d317eba00737b48864dd7b056ddb2b38d7" Jan 26 11:13:08 crc kubenswrapper[5003]: I0126 11:13:08.705691 5003 scope.go:117] "RemoveContainer" containerID="2762f3e7b5eeaffe30706cd14ed8eeace62d4b088b74d37c258fd690c22363a4" Jan 26 11:13:08 crc kubenswrapper[5003]: I0126 11:13:08.729645 5003 scope.go:117] "RemoveContainer" containerID="0770c050ea8a958bb547dcb3d5c14051dcb8b1ccd15bfafed31c3f865fefdbbd" Jan 26 11:13:08 crc kubenswrapper[5003]: I0126 11:13:08.751043 5003 scope.go:117] "RemoveContainer" containerID="d7fb6878a92ee87d7db265eadfae05b3d0985acd163e665320512d839d610a3f" Jan 26 11:13:08 crc kubenswrapper[5003]: I0126 11:13:08.767584 5003 scope.go:117] "RemoveContainer" containerID="e9911461a08addd9f13a08160257457889a9722b9819d3456cc4a25adbf59a47" Jan 26 11:13:08 crc kubenswrapper[5003]: I0126 11:13:08.789645 5003 scope.go:117] "RemoveContainer" containerID="4907090f0c08ce892242dc6da0c571bfff88a65efbea27edf6c9bf1e7a8726e2" Jan 26 11:13:08 crc kubenswrapper[5003]: I0126 11:13:08.807250 5003 scope.go:117] "RemoveContainer" containerID="d65b34100918b96132397a5eee7049d097c8c2b314d41f577fb2fea7f465e561" Jan 26 11:13:08 crc kubenswrapper[5003]: I0126 11:13:08.823895 5003 scope.go:117] "RemoveContainer" containerID="5eaf8fd7a9541d6ecfd0db15ef13773e6e2560b30dc15a6f5186fbbc0110ba52" Jan 26 11:13:08 crc kubenswrapper[5003]: I0126 11:13:08.837844 5003 scope.go:117] "RemoveContainer" containerID="dd9212d7099a51e074a1a2848caa2609023fe3642ce7bc2ea60b12678db15303" Jan 26 11:13:08 crc kubenswrapper[5003]: I0126 11:13:08.850594 5003 scope.go:117] "RemoveContainer" containerID="40b32c6fc232474ff05130b599d33628d15d5395bc63c6a8e36c395a5987dbf7" Jan 26 11:13:08 crc kubenswrapper[5003]: I0126 11:13:08.870327 5003 scope.go:117] "RemoveContainer" containerID="2d3bb1e14fc5fa885525112e4de1e42cb1b952ff3d6d294295aca57f2c065bde" Jan 26 11:13:08 crc kubenswrapper[5003]: I0126 11:13:08.889265 5003 scope.go:117] "RemoveContainer" containerID="42f325d7b84c82e4f613405b4c7f0319f3de962a3658069d862d91a3503882d9" Jan 26 11:13:08 crc kubenswrapper[5003]: I0126 11:13:08.905561 5003 scope.go:117] "RemoveContainer" containerID="af0a0d209d83ae052e0b10fafada744ffd209e151109e3d6280db46737d11aa0" Jan 26 11:13:08 crc kubenswrapper[5003]: I0126 11:13:08.922961 5003 scope.go:117] "RemoveContainer" containerID="24eab1403956f1736a6551ce380aad9e6271e0fd4f6826b5076463e24266977f" Jan 26 11:13:08 crc kubenswrapper[5003]: I0126 11:13:08.936839 5003 scope.go:117] "RemoveContainer" containerID="09c77f38af2391a61d05a4f86f8d38370c6798f914310c787187c33e9665820f" Jan 26 11:13:08 crc kubenswrapper[5003]: I0126 11:13:08.953413 5003 scope.go:117] 
"RemoveContainer" containerID="4c32f17ff700698e9f6dc94973c47fc1e273b44494b6a3427b8b079646256ced" Jan 26 11:13:21 crc kubenswrapper[5003]: I0126 11:13:21.001905 5003 scope.go:117] "RemoveContainer" containerID="fd493fb6bf7d26d1384d6055009fa9b6eff08b01fe28831769b5174d9d440aa2" Jan 26 11:13:21 crc kubenswrapper[5003]: E0126 11:13:21.002950 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m84kp_openshift-machine-config-operator(c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" Jan 26 11:13:34 crc kubenswrapper[5003]: I0126 11:13:34.001480 5003 scope.go:117] "RemoveContainer" containerID="fd493fb6bf7d26d1384d6055009fa9b6eff08b01fe28831769b5174d9d440aa2" Jan 26 11:13:34 crc kubenswrapper[5003]: E0126 11:13:34.003981 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m84kp_openshift-machine-config-operator(c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" Jan 26 11:13:48 crc kubenswrapper[5003]: I0126 11:13:48.002952 5003 scope.go:117] "RemoveContainer" containerID="fd493fb6bf7d26d1384d6055009fa9b6eff08b01fe28831769b5174d9d440aa2" Jan 26 11:13:48 crc kubenswrapper[5003]: I0126 11:13:48.396064 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" event={"ID":"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd","Type":"ContainerStarted","Data":"6f8f59bbd54d6b22048eff02c77837365fc23e772c25a8528cb3daf5a8a768eb"} Jan 26 11:13:58 crc kubenswrapper[5003]: I0126 11:13:58.690395 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-klk7q/must-gather-5g8tw"] Jan 26 11:13:58 crc kubenswrapper[5003]: E0126 11:13:58.691816 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67f8821f-6b1c-47ee-ae95-b8add514d9cf" containerName="registry-server" Jan 26 11:13:58 crc kubenswrapper[5003]: I0126 11:13:58.691841 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="67f8821f-6b1c-47ee-ae95-b8add514d9cf" containerName="registry-server" Jan 26 11:13:58 crc kubenswrapper[5003]: E0126 11:13:58.691863 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af0ae9dc-aef2-45db-9f30-87494fe64171" containerName="gather" Jan 26 11:13:58 crc kubenswrapper[5003]: I0126 11:13:58.691873 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="af0ae9dc-aef2-45db-9f30-87494fe64171" containerName="gather" Jan 26 11:13:58 crc kubenswrapper[5003]: E0126 11:13:58.691905 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67f8821f-6b1c-47ee-ae95-b8add514d9cf" containerName="extract-content" Jan 26 11:13:58 crc kubenswrapper[5003]: I0126 11:13:58.691914 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="67f8821f-6b1c-47ee-ae95-b8add514d9cf" containerName="extract-content" Jan 26 11:13:58 crc kubenswrapper[5003]: E0126 11:13:58.691925 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af0ae9dc-aef2-45db-9f30-87494fe64171" containerName="copy" Jan 26 11:13:58 crc kubenswrapper[5003]: I0126 
11:13:58.691934 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="af0ae9dc-aef2-45db-9f30-87494fe64171" containerName="copy" Jan 26 11:13:58 crc kubenswrapper[5003]: E0126 11:13:58.691958 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67f8821f-6b1c-47ee-ae95-b8add514d9cf" containerName="extract-utilities" Jan 26 11:13:58 crc kubenswrapper[5003]: I0126 11:13:58.691966 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="67f8821f-6b1c-47ee-ae95-b8add514d9cf" containerName="extract-utilities" Jan 26 11:13:58 crc kubenswrapper[5003]: I0126 11:13:58.692304 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="af0ae9dc-aef2-45db-9f30-87494fe64171" containerName="gather" Jan 26 11:13:58 crc kubenswrapper[5003]: I0126 11:13:58.692327 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="af0ae9dc-aef2-45db-9f30-87494fe64171" containerName="copy" Jan 26 11:13:58 crc kubenswrapper[5003]: I0126 11:13:58.692351 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="67f8821f-6b1c-47ee-ae95-b8add514d9cf" containerName="registry-server" Jan 26 11:13:58 crc kubenswrapper[5003]: I0126 11:13:58.693518 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-klk7q/must-gather-5g8tw" Jan 26 11:13:58 crc kubenswrapper[5003]: I0126 11:13:58.696386 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-klk7q"/"openshift-service-ca.crt" Jan 26 11:13:58 crc kubenswrapper[5003]: I0126 11:13:58.696461 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-klk7q"/"kube-root-ca.crt" Jan 26 11:13:58 crc kubenswrapper[5003]: I0126 11:13:58.707656 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-klk7q/must-gather-5g8tw"] Jan 26 11:13:58 crc kubenswrapper[5003]: I0126 11:13:58.837315 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/2e0201d1-f503-49a7-8c16-f1822619ec88-must-gather-output\") pod \"must-gather-5g8tw\" (UID: \"2e0201d1-f503-49a7-8c16-f1822619ec88\") " pod="openshift-must-gather-klk7q/must-gather-5g8tw" Jan 26 11:13:58 crc kubenswrapper[5003]: I0126 11:13:58.837379 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59lgw\" (UniqueName: \"kubernetes.io/projected/2e0201d1-f503-49a7-8c16-f1822619ec88-kube-api-access-59lgw\") pod \"must-gather-5g8tw\" (UID: \"2e0201d1-f503-49a7-8c16-f1822619ec88\") " pod="openshift-must-gather-klk7q/must-gather-5g8tw" Jan 26 11:13:58 crc kubenswrapper[5003]: I0126 11:13:58.938421 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/2e0201d1-f503-49a7-8c16-f1822619ec88-must-gather-output\") pod \"must-gather-5g8tw\" (UID: \"2e0201d1-f503-49a7-8c16-f1822619ec88\") " pod="openshift-must-gather-klk7q/must-gather-5g8tw" Jan 26 11:13:58 crc kubenswrapper[5003]: I0126 11:13:58.938470 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59lgw\" (UniqueName: \"kubernetes.io/projected/2e0201d1-f503-49a7-8c16-f1822619ec88-kube-api-access-59lgw\") pod \"must-gather-5g8tw\" (UID: \"2e0201d1-f503-49a7-8c16-f1822619ec88\") " pod="openshift-must-gather-klk7q/must-gather-5g8tw" Jan 26 11:13:58 crc kubenswrapper[5003]: I0126 11:13:58.938861 5003 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/2e0201d1-f503-49a7-8c16-f1822619ec88-must-gather-output\") pod \"must-gather-5g8tw\" (UID: \"2e0201d1-f503-49a7-8c16-f1822619ec88\") " pod="openshift-must-gather-klk7q/must-gather-5g8tw" Jan 26 11:13:58 crc kubenswrapper[5003]: I0126 11:13:58.956539 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59lgw\" (UniqueName: \"kubernetes.io/projected/2e0201d1-f503-49a7-8c16-f1822619ec88-kube-api-access-59lgw\") pod \"must-gather-5g8tw\" (UID: \"2e0201d1-f503-49a7-8c16-f1822619ec88\") " pod="openshift-must-gather-klk7q/must-gather-5g8tw" Jan 26 11:13:59 crc kubenswrapper[5003]: I0126 11:13:59.027993 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-klk7q/must-gather-5g8tw" Jan 26 11:13:59 crc kubenswrapper[5003]: I0126 11:13:59.205770 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-klk7q/must-gather-5g8tw"] Jan 26 11:13:59 crc kubenswrapper[5003]: I0126 11:13:59.463655 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-klk7q/must-gather-5g8tw" event={"ID":"2e0201d1-f503-49a7-8c16-f1822619ec88","Type":"ContainerStarted","Data":"e269b69905c16e15dd6e76d854520f35d1804bdb7ece19aa2f488492ae82ecd1"} Jan 26 11:13:59 crc kubenswrapper[5003]: I0126 11:13:59.463991 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-klk7q/must-gather-5g8tw" event={"ID":"2e0201d1-f503-49a7-8c16-f1822619ec88","Type":"ContainerStarted","Data":"805cde4d7b808ce925f4c3b11c66faa22f0305f9173c23bb5afb2ad53ff9ab5f"} Jan 26 11:14:00 crc kubenswrapper[5003]: I0126 11:14:00.471425 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-klk7q/must-gather-5g8tw" event={"ID":"2e0201d1-f503-49a7-8c16-f1822619ec88","Type":"ContainerStarted","Data":"e77bb569d8c21b68e95d7d744a97fa40b3d4f7fbeb79fd82233bbfc5905957d6"} Jan 26 11:14:00 crc kubenswrapper[5003]: I0126 11:14:00.487927 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-klk7q/must-gather-5g8tw" podStartSLOduration=2.48790873 podStartE2EDuration="2.48790873s" podCreationTimestamp="2026-01-26 11:13:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 11:14:00.483412522 +0000 UTC m=+1856.024638083" watchObservedRunningTime="2026-01-26 11:14:00.48790873 +0000 UTC m=+1856.029134291" Jan 26 11:14:08 crc kubenswrapper[5003]: I0126 11:14:08.996854 5003 scope.go:117] "RemoveContainer" containerID="f986d3cb7935c14181483f8da787cf796fab5b4480960ed3ae2a7eaa7eef5fb0" Jan 26 11:14:48 crc kubenswrapper[5003]: I0126 11:14:48.488223 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-5kwm8_c152b47d-1462-4bec-9048-37ce680c0d19/control-plane-machine-set-operator/0.log" Jan 26 11:14:48 crc kubenswrapper[5003]: I0126 11:14:48.667496 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-7j782_f149d971-e11c-471d-91a2-a8e5ed472e41/machine-api-operator/0.log" Jan 26 11:14:48 crc kubenswrapper[5003]: I0126 11:14:48.670519 5003 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-7j782_f149d971-e11c-471d-91a2-a8e5ed472e41/kube-rbac-proxy/0.log" Jan 26 11:15:00 crc kubenswrapper[5003]: I0126 11:15:00.135312 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29490435-l8w5n"] Jan 26 11:15:00 crc kubenswrapper[5003]: I0126 11:15:00.136779 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29490435-l8w5n" Jan 26 11:15:00 crc kubenswrapper[5003]: I0126 11:15:00.138588 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 26 11:15:00 crc kubenswrapper[5003]: I0126 11:15:00.138863 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 26 11:15:00 crc kubenswrapper[5003]: I0126 11:15:00.146544 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29490435-l8w5n"] Jan 26 11:15:00 crc kubenswrapper[5003]: I0126 11:15:00.211623 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/060f8233-5012-4f66-8a98-2c0515fc54af-secret-volume\") pod \"collect-profiles-29490435-l8w5n\" (UID: \"060f8233-5012-4f66-8a98-2c0515fc54af\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490435-l8w5n" Jan 26 11:15:00 crc kubenswrapper[5003]: I0126 11:15:00.211685 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/060f8233-5012-4f66-8a98-2c0515fc54af-config-volume\") pod \"collect-profiles-29490435-l8w5n\" (UID: \"060f8233-5012-4f66-8a98-2c0515fc54af\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490435-l8w5n" Jan 26 11:15:00 crc kubenswrapper[5003]: I0126 11:15:00.211735 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-26whd\" (UniqueName: \"kubernetes.io/projected/060f8233-5012-4f66-8a98-2c0515fc54af-kube-api-access-26whd\") pod \"collect-profiles-29490435-l8w5n\" (UID: \"060f8233-5012-4f66-8a98-2c0515fc54af\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490435-l8w5n" Jan 26 11:15:00 crc kubenswrapper[5003]: I0126 11:15:00.313186 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/060f8233-5012-4f66-8a98-2c0515fc54af-secret-volume\") pod \"collect-profiles-29490435-l8w5n\" (UID: \"060f8233-5012-4f66-8a98-2c0515fc54af\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490435-l8w5n" Jan 26 11:15:00 crc kubenswrapper[5003]: I0126 11:15:00.313242 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/060f8233-5012-4f66-8a98-2c0515fc54af-config-volume\") pod \"collect-profiles-29490435-l8w5n\" (UID: \"060f8233-5012-4f66-8a98-2c0515fc54af\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490435-l8w5n" Jan 26 11:15:00 crc kubenswrapper[5003]: I0126 11:15:00.313275 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-26whd\" (UniqueName: 
\"kubernetes.io/projected/060f8233-5012-4f66-8a98-2c0515fc54af-kube-api-access-26whd\") pod \"collect-profiles-29490435-l8w5n\" (UID: \"060f8233-5012-4f66-8a98-2c0515fc54af\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490435-l8w5n" Jan 26 11:15:00 crc kubenswrapper[5003]: I0126 11:15:00.314918 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/060f8233-5012-4f66-8a98-2c0515fc54af-config-volume\") pod \"collect-profiles-29490435-l8w5n\" (UID: \"060f8233-5012-4f66-8a98-2c0515fc54af\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490435-l8w5n" Jan 26 11:15:00 crc kubenswrapper[5003]: I0126 11:15:00.321358 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/060f8233-5012-4f66-8a98-2c0515fc54af-secret-volume\") pod \"collect-profiles-29490435-l8w5n\" (UID: \"060f8233-5012-4f66-8a98-2c0515fc54af\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490435-l8w5n" Jan 26 11:15:00 crc kubenswrapper[5003]: I0126 11:15:00.331601 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-26whd\" (UniqueName: \"kubernetes.io/projected/060f8233-5012-4f66-8a98-2c0515fc54af-kube-api-access-26whd\") pod \"collect-profiles-29490435-l8w5n\" (UID: \"060f8233-5012-4f66-8a98-2c0515fc54af\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490435-l8w5n" Jan 26 11:15:00 crc kubenswrapper[5003]: I0126 11:15:00.456645 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29490435-l8w5n" Jan 26 11:15:00 crc kubenswrapper[5003]: I0126 11:15:00.630214 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29490435-l8w5n"] Jan 26 11:15:00 crc kubenswrapper[5003]: W0126 11:15:00.643203 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod060f8233_5012_4f66_8a98_2c0515fc54af.slice/crio-eb42954cf6e13325cf25c3e2e67c09e2dec07fc810017aa7a581eb17426373a0 WatchSource:0}: Error finding container eb42954cf6e13325cf25c3e2e67c09e2dec07fc810017aa7a581eb17426373a0: Status 404 returned error can't find the container with id eb42954cf6e13325cf25c3e2e67c09e2dec07fc810017aa7a581eb17426373a0 Jan 26 11:15:00 crc kubenswrapper[5003]: I0126 11:15:00.830862 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29490435-l8w5n" event={"ID":"060f8233-5012-4f66-8a98-2c0515fc54af","Type":"ContainerStarted","Data":"6949328a32b3351f54cc6b3eecb33d7415ac5398623cbee804afeab0eccd6652"} Jan 26 11:15:00 crc kubenswrapper[5003]: I0126 11:15:00.831189 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29490435-l8w5n" event={"ID":"060f8233-5012-4f66-8a98-2c0515fc54af","Type":"ContainerStarted","Data":"eb42954cf6e13325cf25c3e2e67c09e2dec07fc810017aa7a581eb17426373a0"} Jan 26 11:15:00 crc kubenswrapper[5003]: I0126 11:15:00.844981 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29490435-l8w5n" podStartSLOduration=0.844959254 podStartE2EDuration="844.959254ms" podCreationTimestamp="2026-01-26 11:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 
00:00:00 +0000 UTC" observedRunningTime="2026-01-26 11:15:00.843356699 +0000 UTC m=+1916.384582280" watchObservedRunningTime="2026-01-26 11:15:00.844959254 +0000 UTC m=+1916.386184815" Jan 26 11:15:01 crc kubenswrapper[5003]: I0126 11:15:01.838492 5003 generic.go:334] "Generic (PLEG): container finished" podID="060f8233-5012-4f66-8a98-2c0515fc54af" containerID="6949328a32b3351f54cc6b3eecb33d7415ac5398623cbee804afeab0eccd6652" exitCode=0 Jan 26 11:15:01 crc kubenswrapper[5003]: I0126 11:15:01.838557 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29490435-l8w5n" event={"ID":"060f8233-5012-4f66-8a98-2c0515fc54af","Type":"ContainerDied","Data":"6949328a32b3351f54cc6b3eecb33d7415ac5398623cbee804afeab0eccd6652"} Jan 26 11:15:03 crc kubenswrapper[5003]: I0126 11:15:03.101562 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29490435-l8w5n" Jan 26 11:15:03 crc kubenswrapper[5003]: I0126 11:15:03.248307 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-26whd\" (UniqueName: \"kubernetes.io/projected/060f8233-5012-4f66-8a98-2c0515fc54af-kube-api-access-26whd\") pod \"060f8233-5012-4f66-8a98-2c0515fc54af\" (UID: \"060f8233-5012-4f66-8a98-2c0515fc54af\") " Jan 26 11:15:03 crc kubenswrapper[5003]: I0126 11:15:03.248456 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/060f8233-5012-4f66-8a98-2c0515fc54af-secret-volume\") pod \"060f8233-5012-4f66-8a98-2c0515fc54af\" (UID: \"060f8233-5012-4f66-8a98-2c0515fc54af\") " Jan 26 11:15:03 crc kubenswrapper[5003]: I0126 11:15:03.248526 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/060f8233-5012-4f66-8a98-2c0515fc54af-config-volume\") pod \"060f8233-5012-4f66-8a98-2c0515fc54af\" (UID: \"060f8233-5012-4f66-8a98-2c0515fc54af\") " Jan 26 11:15:03 crc kubenswrapper[5003]: I0126 11:15:03.249347 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/060f8233-5012-4f66-8a98-2c0515fc54af-config-volume" (OuterVolumeSpecName: "config-volume") pod "060f8233-5012-4f66-8a98-2c0515fc54af" (UID: "060f8233-5012-4f66-8a98-2c0515fc54af"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 11:15:03 crc kubenswrapper[5003]: I0126 11:15:03.253609 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/060f8233-5012-4f66-8a98-2c0515fc54af-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "060f8233-5012-4f66-8a98-2c0515fc54af" (UID: "060f8233-5012-4f66-8a98-2c0515fc54af"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 11:15:03 crc kubenswrapper[5003]: I0126 11:15:03.257550 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/060f8233-5012-4f66-8a98-2c0515fc54af-kube-api-access-26whd" (OuterVolumeSpecName: "kube-api-access-26whd") pod "060f8233-5012-4f66-8a98-2c0515fc54af" (UID: "060f8233-5012-4f66-8a98-2c0515fc54af"). InnerVolumeSpecName "kube-api-access-26whd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:15:03 crc kubenswrapper[5003]: I0126 11:15:03.350019 5003 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/060f8233-5012-4f66-8a98-2c0515fc54af-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 26 11:15:03 crc kubenswrapper[5003]: I0126 11:15:03.350060 5003 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/060f8233-5012-4f66-8a98-2c0515fc54af-config-volume\") on node \"crc\" DevicePath \"\"" Jan 26 11:15:03 crc kubenswrapper[5003]: I0126 11:15:03.350075 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-26whd\" (UniqueName: \"kubernetes.io/projected/060f8233-5012-4f66-8a98-2c0515fc54af-kube-api-access-26whd\") on node \"crc\" DevicePath \"\"" Jan 26 11:15:03 crc kubenswrapper[5003]: I0126 11:15:03.851826 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29490435-l8w5n" event={"ID":"060f8233-5012-4f66-8a98-2c0515fc54af","Type":"ContainerDied","Data":"eb42954cf6e13325cf25c3e2e67c09e2dec07fc810017aa7a581eb17426373a0"} Jan 26 11:15:03 crc kubenswrapper[5003]: I0126 11:15:03.851872 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eb42954cf6e13325cf25c3e2e67c09e2dec07fc810017aa7a581eb17426373a0" Jan 26 11:15:03 crc kubenswrapper[5003]: I0126 11:15:03.851903 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29490435-l8w5n" Jan 26 11:15:14 crc kubenswrapper[5003]: I0126 11:15:14.940918 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-m7jxn_0f4f27a6-9cd9-4b96-90d2-dd695d64362c/kube-rbac-proxy/0.log" Jan 26 11:15:15 crc kubenswrapper[5003]: I0126 11:15:15.003097 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-m7jxn_0f4f27a6-9cd9-4b96-90d2-dd695d64362c/controller/0.log" Jan 26 11:15:15 crc kubenswrapper[5003]: I0126 11:15:15.137125 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/cp-frr-files/0.log" Jan 26 11:15:15 crc kubenswrapper[5003]: I0126 11:15:15.292197 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/cp-frr-files/0.log" Jan 26 11:15:15 crc kubenswrapper[5003]: I0126 11:15:15.298684 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/cp-metrics/0.log" Jan 26 11:15:15 crc kubenswrapper[5003]: I0126 11:15:15.316886 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/cp-reloader/0.log" Jan 26 11:15:15 crc kubenswrapper[5003]: I0126 11:15:15.319681 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/cp-reloader/0.log" Jan 26 11:15:15 crc kubenswrapper[5003]: I0126 11:15:15.446174 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/cp-frr-files/0.log" Jan 26 11:15:15 crc kubenswrapper[5003]: I0126 11:15:15.487814 5003 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/cp-metrics/0.log" Jan 26 11:15:15 crc kubenswrapper[5003]: I0126 11:15:15.488823 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/cp-metrics/0.log" Jan 26 11:15:15 crc kubenswrapper[5003]: I0126 11:15:15.502165 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/cp-reloader/0.log" Jan 26 11:15:15 crc kubenswrapper[5003]: I0126 11:15:15.632152 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/cp-frr-files/0.log" Jan 26 11:15:15 crc kubenswrapper[5003]: I0126 11:15:15.639476 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/cp-reloader/0.log" Jan 26 11:15:15 crc kubenswrapper[5003]: I0126 11:15:15.644638 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/controller/0.log" Jan 26 11:15:15 crc kubenswrapper[5003]: I0126 11:15:15.672269 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/cp-metrics/0.log" Jan 26 11:15:15 crc kubenswrapper[5003]: I0126 11:15:15.803362 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/kube-rbac-proxy/0.log" Jan 26 11:15:15 crc kubenswrapper[5003]: I0126 11:15:15.816821 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/kube-rbac-proxy-frr/0.log" Jan 26 11:15:15 crc kubenswrapper[5003]: I0126 11:15:15.820140 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/frr-metrics/0.log" Jan 26 11:15:16 crc kubenswrapper[5003]: I0126 11:15:16.040852 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/reloader/0.log" Jan 26 11:15:16 crc kubenswrapper[5003]: I0126 11:15:16.057731 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-75hzw_b36ce522-21fc-49c5-a4ff-6a6680060a85/frr-k8s-webhook-server/0.log" Jan 26 11:15:16 crc kubenswrapper[5003]: I0126 11:15:16.222728 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-7dbc7b9d98-qtfvg_cbdc93bb-9371-4da1-843d-e5ec38ca21fd/manager/0.log" Jan 26 11:15:16 crc kubenswrapper[5003]: I0126 11:15:16.370971 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6ff7ff8d4c-slczq_69d7a94b-db77-4b43-8fce-9ab7a114d24c/webhook-server/0.log" Jan 26 11:15:16 crc kubenswrapper[5003]: I0126 11:15:16.383869 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gplmk_8ffa174f-aed7-4f5b-9feb-62a45fc68d0b/frr/0.log" Jan 26 11:15:16 crc kubenswrapper[5003]: I0126 11:15:16.454851 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-bfmvk_104f69d2-ed9b-4607-82f2-649871e3c881/kube-rbac-proxy/0.log" Jan 26 11:15:16 crc kubenswrapper[5003]: I0126 11:15:16.652692 5003 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_speaker-bfmvk_104f69d2-ed9b-4607-82f2-649871e3c881/speaker/0.log" Jan 26 11:15:41 crc kubenswrapper[5003]: I0126 11:15:41.024266 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q_57d5e63e-8e5c-4a9f-ac8b-175ca05409ba/util/0.log" Jan 26 11:15:41 crc kubenswrapper[5003]: I0126 11:15:41.156466 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q_57d5e63e-8e5c-4a9f-ac8b-175ca05409ba/pull/0.log" Jan 26 11:15:41 crc kubenswrapper[5003]: I0126 11:15:41.173517 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q_57d5e63e-8e5c-4a9f-ac8b-175ca05409ba/util/0.log" Jan 26 11:15:41 crc kubenswrapper[5003]: I0126 11:15:41.198929 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q_57d5e63e-8e5c-4a9f-ac8b-175ca05409ba/pull/0.log" Jan 26 11:15:41 crc kubenswrapper[5003]: I0126 11:15:41.348630 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q_57d5e63e-8e5c-4a9f-ac8b-175ca05409ba/extract/0.log" Jan 26 11:15:41 crc kubenswrapper[5003]: I0126 11:15:41.363502 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q_57d5e63e-8e5c-4a9f-ac8b-175ca05409ba/util/0.log" Jan 26 11:15:41 crc kubenswrapper[5003]: I0126 11:15:41.363509 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dclsn9q_57d5e63e-8e5c-4a9f-ac8b-175ca05409ba/pull/0.log" Jan 26 11:15:41 crc kubenswrapper[5003]: I0126 11:15:41.519691 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-6lx88_2252190e-b3ad-44ff-9973-881f9e111836/extract-utilities/0.log" Jan 26 11:15:41 crc kubenswrapper[5003]: I0126 11:15:41.635216 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-6lx88_2252190e-b3ad-44ff-9973-881f9e111836/extract-utilities/0.log" Jan 26 11:15:41 crc kubenswrapper[5003]: I0126 11:15:41.652540 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-6lx88_2252190e-b3ad-44ff-9973-881f9e111836/extract-content/0.log" Jan 26 11:15:41 crc kubenswrapper[5003]: I0126 11:15:41.678733 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-6lx88_2252190e-b3ad-44ff-9973-881f9e111836/extract-content/0.log" Jan 26 11:15:41 crc kubenswrapper[5003]: I0126 11:15:41.816032 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-6lx88_2252190e-b3ad-44ff-9973-881f9e111836/extract-utilities/0.log" Jan 26 11:15:41 crc kubenswrapper[5003]: I0126 11:15:41.894380 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-6lx88_2252190e-b3ad-44ff-9973-881f9e111836/extract-content/0.log" Jan 26 11:15:41 crc kubenswrapper[5003]: I0126 11:15:41.993593 5003 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_community-operators-6hlj9_dc3bde44-36d1-42a3-9dc5-3d8205c1ccec/extract-utilities/0.log" Jan 26 11:15:42 crc kubenswrapper[5003]: I0126 11:15:42.102055 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-6lx88_2252190e-b3ad-44ff-9973-881f9e111836/registry-server/0.log" Jan 26 11:15:42 crc kubenswrapper[5003]: I0126 11:15:42.186765 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6hlj9_dc3bde44-36d1-42a3-9dc5-3d8205c1ccec/extract-content/0.log" Jan 26 11:15:42 crc kubenswrapper[5003]: I0126 11:15:42.189772 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6hlj9_dc3bde44-36d1-42a3-9dc5-3d8205c1ccec/extract-utilities/0.log" Jan 26 11:15:42 crc kubenswrapper[5003]: I0126 11:15:42.219999 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6hlj9_dc3bde44-36d1-42a3-9dc5-3d8205c1ccec/extract-content/0.log" Jan 26 11:15:42 crc kubenswrapper[5003]: I0126 11:15:42.362059 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6hlj9_dc3bde44-36d1-42a3-9dc5-3d8205c1ccec/extract-content/0.log" Jan 26 11:15:42 crc kubenswrapper[5003]: I0126 11:15:42.376310 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6hlj9_dc3bde44-36d1-42a3-9dc5-3d8205c1ccec/extract-utilities/0.log" Jan 26 11:15:42 crc kubenswrapper[5003]: I0126 11:15:42.547756 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-ljw7m_4f101492-8469-482f-a258-7a3a4e9fade0/marketplace-operator/0.log" Jan 26 11:15:42 crc kubenswrapper[5003]: I0126 11:15:42.566343 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6hlj9_dc3bde44-36d1-42a3-9dc5-3d8205c1ccec/registry-server/0.log" Jan 26 11:15:42 crc kubenswrapper[5003]: I0126 11:15:42.572509 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-lrt87_bfe9e96c-fa33-4e01-beec-4e038b6ba28c/extract-utilities/0.log" Jan 26 11:15:42 crc kubenswrapper[5003]: I0126 11:15:42.753405 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-lrt87_bfe9e96c-fa33-4e01-beec-4e038b6ba28c/extract-utilities/0.log" Jan 26 11:15:42 crc kubenswrapper[5003]: I0126 11:15:42.759932 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-lrt87_bfe9e96c-fa33-4e01-beec-4e038b6ba28c/extract-content/0.log" Jan 26 11:15:42 crc kubenswrapper[5003]: I0126 11:15:42.769701 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-lrt87_bfe9e96c-fa33-4e01-beec-4e038b6ba28c/extract-content/0.log" Jan 26 11:15:42 crc kubenswrapper[5003]: I0126 11:15:42.911562 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-lrt87_bfe9e96c-fa33-4e01-beec-4e038b6ba28c/extract-content/0.log" Jan 26 11:15:42 crc kubenswrapper[5003]: I0126 11:15:42.946334 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-lrt87_bfe9e96c-fa33-4e01-beec-4e038b6ba28c/extract-utilities/0.log" Jan 26 11:15:43 crc kubenswrapper[5003]: I0126 11:15:43.007070 5003 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-lrt87_bfe9e96c-fa33-4e01-beec-4e038b6ba28c/registry-server/0.log" Jan 26 11:15:43 crc kubenswrapper[5003]: I0126 11:15:43.078613 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s9pdl_e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92/extract-utilities/0.log" Jan 26 11:15:43 crc kubenswrapper[5003]: I0126 11:15:43.214856 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s9pdl_e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92/extract-utilities/0.log" Jan 26 11:15:43 crc kubenswrapper[5003]: I0126 11:15:43.222348 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s9pdl_e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92/extract-content/0.log" Jan 26 11:15:43 crc kubenswrapper[5003]: I0126 11:15:43.241940 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s9pdl_e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92/extract-content/0.log" Jan 26 11:15:43 crc kubenswrapper[5003]: I0126 11:15:43.409439 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s9pdl_e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92/extract-utilities/0.log" Jan 26 11:15:43 crc kubenswrapper[5003]: I0126 11:15:43.439181 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s9pdl_e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92/extract-content/0.log" Jan 26 11:15:43 crc kubenswrapper[5003]: I0126 11:15:43.712031 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s9pdl_e8bd36b8-4cf2-435c-8fc9-e3eeef2b4b92/registry-server/0.log" Jan 26 11:16:09 crc kubenswrapper[5003]: I0126 11:16:09.039863 5003 patch_prober.go:28] interesting pod/machine-config-daemon-m84kp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 11:16:09 crc kubenswrapper[5003]: I0126 11:16:09.040663 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 11:16:39 crc kubenswrapper[5003]: I0126 11:16:39.040370 5003 patch_prober.go:28] interesting pod/machine-config-daemon-m84kp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 11:16:39 crc kubenswrapper[5003]: I0126 11:16:39.040924 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 11:16:52 crc kubenswrapper[5003]: I0126 11:16:52.505626 5003 generic.go:334] "Generic (PLEG): container finished" podID="2e0201d1-f503-49a7-8c16-f1822619ec88" containerID="e269b69905c16e15dd6e76d854520f35d1804bdb7ece19aa2f488492ae82ecd1" exitCode=0 Jan 26 11:16:52 crc kubenswrapper[5003]: I0126 
Jan 26 11:16:52 crc kubenswrapper[5003]: I0126 11:16:52.506514 5003 scope.go:117] "RemoveContainer" containerID="e269b69905c16e15dd6e76d854520f35d1804bdb7ece19aa2f488492ae82ecd1"
Jan 26 11:16:53 crc kubenswrapper[5003]: I0126 11:16:53.410668 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-klk7q_must-gather-5g8tw_2e0201d1-f503-49a7-8c16-f1822619ec88/gather/0.log"
Jan 26 11:17:02 crc kubenswrapper[5003]: I0126 11:17:02.349847 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-klk7q/must-gather-5g8tw"]
Jan 26 11:17:02 crc kubenswrapper[5003]: I0126 11:17:02.352176 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-klk7q/must-gather-5g8tw" podUID="2e0201d1-f503-49a7-8c16-f1822619ec88" containerName="copy" containerID="cri-o://e77bb569d8c21b68e95d7d744a97fa40b3d4f7fbeb79fd82233bbfc5905957d6" gracePeriod=2
Jan 26 11:17:02 crc kubenswrapper[5003]: I0126 11:17:02.353698 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-klk7q/must-gather-5g8tw"]
Jan 26 11:17:02 crc kubenswrapper[5003]: I0126 11:17:02.568731 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-klk7q_must-gather-5g8tw_2e0201d1-f503-49a7-8c16-f1822619ec88/copy/0.log"
Jan 26 11:17:02 crc kubenswrapper[5003]: I0126 11:17:02.569016 5003 generic.go:334] "Generic (PLEG): container finished" podID="2e0201d1-f503-49a7-8c16-f1822619ec88" containerID="e77bb569d8c21b68e95d7d744a97fa40b3d4f7fbeb79fd82233bbfc5905957d6" exitCode=143
Jan 26 11:17:02 crc kubenswrapper[5003]: I0126 11:17:02.700633 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-klk7q_must-gather-5g8tw_2e0201d1-f503-49a7-8c16-f1822619ec88/copy/0.log"
Jan 26 11:17:02 crc kubenswrapper[5003]: I0126 11:17:02.703553 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-klk7q/must-gather-5g8tw"
Jan 26 11:17:02 crc kubenswrapper[5003]: I0126 11:17:02.768920 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/2e0201d1-f503-49a7-8c16-f1822619ec88-must-gather-output\") pod \"2e0201d1-f503-49a7-8c16-f1822619ec88\" (UID: \"2e0201d1-f503-49a7-8c16-f1822619ec88\") "
Jan 26 11:17:02 crc kubenswrapper[5003]: I0126 11:17:02.769040 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-59lgw\" (UniqueName: \"kubernetes.io/projected/2e0201d1-f503-49a7-8c16-f1822619ec88-kube-api-access-59lgw\") pod \"2e0201d1-f503-49a7-8c16-f1822619ec88\" (UID: \"2e0201d1-f503-49a7-8c16-f1822619ec88\") "
Jan 26 11:17:02 crc kubenswrapper[5003]: I0126 11:17:02.780550 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e0201d1-f503-49a7-8c16-f1822619ec88-kube-api-access-59lgw" (OuterVolumeSpecName: "kube-api-access-59lgw") pod "2e0201d1-f503-49a7-8c16-f1822619ec88" (UID: "2e0201d1-f503-49a7-8c16-f1822619ec88"). InnerVolumeSpecName "kube-api-access-59lgw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 11:17:02 crc kubenswrapper[5003]: I0126 11:17:02.852920 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2e0201d1-f503-49a7-8c16-f1822619ec88-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "2e0201d1-f503-49a7-8c16-f1822619ec88" (UID: "2e0201d1-f503-49a7-8c16-f1822619ec88"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 11:17:02 crc kubenswrapper[5003]: I0126 11:17:02.870144 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-59lgw\" (UniqueName: \"kubernetes.io/projected/2e0201d1-f503-49a7-8c16-f1822619ec88-kube-api-access-59lgw\") on node \"crc\" DevicePath \"\""
Jan 26 11:17:02 crc kubenswrapper[5003]: I0126 11:17:02.870181 5003 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/2e0201d1-f503-49a7-8c16-f1822619ec88-must-gather-output\") on node \"crc\" DevicePath \"\""
Jan 26 11:17:03 crc kubenswrapper[5003]: I0126 11:17:03.008687 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e0201d1-f503-49a7-8c16-f1822619ec88" path="/var/lib/kubelet/pods/2e0201d1-f503-49a7-8c16-f1822619ec88/volumes"
Jan 26 11:17:03 crc kubenswrapper[5003]: I0126 11:17:03.577005 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-klk7q_must-gather-5g8tw_2e0201d1-f503-49a7-8c16-f1822619ec88/copy/0.log"
Jan 26 11:17:03 crc kubenswrapper[5003]: I0126 11:17:03.577420 5003 scope.go:117] "RemoveContainer" containerID="e77bb569d8c21b68e95d7d744a97fa40b3d4f7fbeb79fd82233bbfc5905957d6"
Jan 26 11:17:03 crc kubenswrapper[5003]: I0126 11:17:03.577534 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-klk7q/must-gather-5g8tw"
Jan 26 11:17:03 crc kubenswrapper[5003]: I0126 11:17:03.607428 5003 scope.go:117] "RemoveContainer" containerID="e269b69905c16e15dd6e76d854520f35d1804bdb7ece19aa2f488492ae82ecd1"
Jan 26 11:17:09 crc kubenswrapper[5003]: I0126 11:17:09.040180 5003 patch_prober.go:28] interesting pod/machine-config-daemon-m84kp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 26 11:17:09 crc kubenswrapper[5003]: I0126 11:17:09.041005 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 26 11:17:09 crc kubenswrapper[5003]: I0126 11:17:09.041190 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m84kp"
Jan 26 11:17:09 crc kubenswrapper[5003]: I0126 11:17:09.043454 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6f8f59bbd54d6b22048eff02c77837365fc23e772c25a8528cb3daf5a8a768eb"} pod="openshift-machine-config-operator/machine-config-daemon-m84kp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 26 11:17:09 crc kubenswrapper[5003]: I0126 11:17:09.043601 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" containerID="cri-o://6f8f59bbd54d6b22048eff02c77837365fc23e772c25a8528cb3daf5a8a768eb" gracePeriod=600
Jan 26 11:17:09 crc kubenswrapper[5003]: I0126 11:17:09.619305 5003 generic.go:334] "Generic (PLEG): container finished" podID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerID="6f8f59bbd54d6b22048eff02c77837365fc23e772c25a8528cb3daf5a8a768eb" exitCode=0
Jan 26 11:17:09 crc kubenswrapper[5003]: I0126 11:17:09.619691 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" event={"ID":"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd","Type":"ContainerDied","Data":"6f8f59bbd54d6b22048eff02c77837365fc23e772c25a8528cb3daf5a8a768eb"}
Jan 26 11:17:09 crc kubenswrapper[5003]: I0126 11:17:09.619735 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" event={"ID":"c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd","Type":"ContainerStarted","Data":"7db6d3b40f6faf792495afffccc0bd755ae10a49453ec0bd9e9454209fbd2afd"}
Jan 26 11:17:09 crc kubenswrapper[5003]: I0126 11:17:09.619762 5003 scope.go:117] "RemoveContainer" containerID="fd493fb6bf7d26d1384d6055009fa9b6eff08b01fe28831769b5174d9d440aa2"
Jan 26 11:18:46 crc kubenswrapper[5003]: I0126 11:18:46.278300 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-lmhqz"]
Jan 26 11:18:46 crc kubenswrapper[5003]: E0126 11:18:46.279169 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="060f8233-5012-4f66-8a98-2c0515fc54af" containerName="collect-profiles"
Jan 26 11:18:46 crc kubenswrapper[5003]: I0126 11:18:46.279188 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="060f8233-5012-4f66-8a98-2c0515fc54af" containerName="collect-profiles"
Jan 26 11:18:46 crc kubenswrapper[5003]: E0126 11:18:46.279214 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e0201d1-f503-49a7-8c16-f1822619ec88" containerName="copy"
Jan 26 11:18:46 crc kubenswrapper[5003]: I0126 11:18:46.279224 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e0201d1-f503-49a7-8c16-f1822619ec88" containerName="copy"
Jan 26 11:18:46 crc kubenswrapper[5003]: E0126 11:18:46.279248 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e0201d1-f503-49a7-8c16-f1822619ec88" containerName="gather"
Jan 26 11:18:46 crc kubenswrapper[5003]: I0126 11:18:46.279259 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e0201d1-f503-49a7-8c16-f1822619ec88" containerName="gather"
Jan 26 11:18:46 crc kubenswrapper[5003]: I0126 11:18:46.279427 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="060f8233-5012-4f66-8a98-2c0515fc54af" containerName="collect-profiles"
Jan 26 11:18:46 crc kubenswrapper[5003]: I0126 11:18:46.279463 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e0201d1-f503-49a7-8c16-f1822619ec88" containerName="gather"
Jan 26 11:18:46 crc kubenswrapper[5003]: I0126 11:18:46.279476 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e0201d1-f503-49a7-8c16-f1822619ec88" containerName="copy"
Jan 26 11:18:46 crc kubenswrapper[5003]: I0126 11:18:46.280862 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lmhqz"
Jan 26 11:18:46 crc kubenswrapper[5003]: I0126 11:18:46.303127 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lmhqz"]
Jan 26 11:18:46 crc kubenswrapper[5003]: I0126 11:18:46.305394 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88fccf68-f789-4f4d-86fe-a7f97139b34d-utilities\") pod \"certified-operators-lmhqz\" (UID: \"88fccf68-f789-4f4d-86fe-a7f97139b34d\") " pod="openshift-marketplace/certified-operators-lmhqz"
Jan 26 11:18:46 crc kubenswrapper[5003]: I0126 11:18:46.305452 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lrj5\" (UniqueName: \"kubernetes.io/projected/88fccf68-f789-4f4d-86fe-a7f97139b34d-kube-api-access-7lrj5\") pod \"certified-operators-lmhqz\" (UID: \"88fccf68-f789-4f4d-86fe-a7f97139b34d\") " pod="openshift-marketplace/certified-operators-lmhqz"
Jan 26 11:18:46 crc kubenswrapper[5003]: I0126 11:18:46.305593 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88fccf68-f789-4f4d-86fe-a7f97139b34d-catalog-content\") pod \"certified-operators-lmhqz\" (UID: \"88fccf68-f789-4f4d-86fe-a7f97139b34d\") " pod="openshift-marketplace/certified-operators-lmhqz"
Jan 26 11:18:46 crc kubenswrapper[5003]: I0126 11:18:46.406814 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88fccf68-f789-4f4d-86fe-a7f97139b34d-catalog-content\") pod \"certified-operators-lmhqz\" (UID: \"88fccf68-f789-4f4d-86fe-a7f97139b34d\") " pod="openshift-marketplace/certified-operators-lmhqz"
Jan 26 11:18:46 crc kubenswrapper[5003]: I0126 11:18:46.406868 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88fccf68-f789-4f4d-86fe-a7f97139b34d-utilities\") pod \"certified-operators-lmhqz\" (UID: \"88fccf68-f789-4f4d-86fe-a7f97139b34d\") " pod="openshift-marketplace/certified-operators-lmhqz"
Jan 26 11:18:46 crc kubenswrapper[5003]: I0126 11:18:46.406899 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lrj5\" (UniqueName: \"kubernetes.io/projected/88fccf68-f789-4f4d-86fe-a7f97139b34d-kube-api-access-7lrj5\") pod \"certified-operators-lmhqz\" (UID: \"88fccf68-f789-4f4d-86fe-a7f97139b34d\") " pod="openshift-marketplace/certified-operators-lmhqz"
Jan 26 11:18:46 crc kubenswrapper[5003]: I0126 11:18:46.407534 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88fccf68-f789-4f4d-86fe-a7f97139b34d-utilities\") pod \"certified-operators-lmhqz\" (UID: \"88fccf68-f789-4f4d-86fe-a7f97139b34d\") " pod="openshift-marketplace/certified-operators-lmhqz"
Jan 26 11:18:46 crc kubenswrapper[5003]: I0126 11:18:46.407636 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88fccf68-f789-4f4d-86fe-a7f97139b34d-catalog-content\") pod \"certified-operators-lmhqz\" (UID: \"88fccf68-f789-4f4d-86fe-a7f97139b34d\") " pod="openshift-marketplace/certified-operators-lmhqz"
Jan 26 11:18:46 crc kubenswrapper[5003]: I0126 11:18:46.442610 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lrj5\" (UniqueName: \"kubernetes.io/projected/88fccf68-f789-4f4d-86fe-a7f97139b34d-kube-api-access-7lrj5\") pod \"certified-operators-lmhqz\" (UID: \"88fccf68-f789-4f4d-86fe-a7f97139b34d\") " pod="openshift-marketplace/certified-operators-lmhqz"
Jan 26 11:18:46 crc kubenswrapper[5003]: I0126 11:18:46.609274 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lmhqz"
Jan 26 11:18:46 crc kubenswrapper[5003]: I0126 11:18:46.902713 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lmhqz"]
Jan 26 11:18:47 crc kubenswrapper[5003]: I0126 11:18:47.395733 5003 generic.go:334] "Generic (PLEG): container finished" podID="88fccf68-f789-4f4d-86fe-a7f97139b34d" containerID="2dc2265cac42e7d76b14c9b4dbeabf7e9ad26d506c55c78064b5b7e79bb64f0f" exitCode=0
Jan 26 11:18:47 crc kubenswrapper[5003]: I0126 11:18:47.395801 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lmhqz" event={"ID":"88fccf68-f789-4f4d-86fe-a7f97139b34d","Type":"ContainerDied","Data":"2dc2265cac42e7d76b14c9b4dbeabf7e9ad26d506c55c78064b5b7e79bb64f0f"}
Jan 26 11:18:47 crc kubenswrapper[5003]: I0126 11:18:47.396105 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lmhqz" event={"ID":"88fccf68-f789-4f4d-86fe-a7f97139b34d","Type":"ContainerStarted","Data":"b65d95b56088b3c7647cc3222b4a639cc9848d9d92e6bf6f1711079fa3d66db3"}
Jan 26 11:18:47 crc kubenswrapper[5003]: I0126 11:18:47.398038 5003 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 26 11:18:49 crc kubenswrapper[5003]: I0126 11:18:49.408651 5003 generic.go:334] "Generic (PLEG): container finished" podID="88fccf68-f789-4f4d-86fe-a7f97139b34d" containerID="9c3b6c33f87f85f2180733bb307802315444afe657539a8b31b3761595d5ad4e" exitCode=0
Jan 26 11:18:49 crc kubenswrapper[5003]: I0126 11:18:49.408725 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lmhqz" event={"ID":"88fccf68-f789-4f4d-86fe-a7f97139b34d","Type":"ContainerDied","Data":"9c3b6c33f87f85f2180733bb307802315444afe657539a8b31b3761595d5ad4e"}
Jan 26 11:18:50 crc kubenswrapper[5003]: I0126 11:18:50.416701 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lmhqz" event={"ID":"88fccf68-f789-4f4d-86fe-a7f97139b34d","Type":"ContainerStarted","Data":"ed3b2c893281163d416684928254025fa3324e36b4c35d400e37812f72860953"}
Jan 26 11:18:50 crc kubenswrapper[5003]: I0126 11:18:50.438662 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-lmhqz" podStartSLOduration=1.947189281 podStartE2EDuration="4.43864248s" podCreationTimestamp="2026-01-26 11:18:46 +0000 UTC" firstStartedPulling="2026-01-26 11:18:47.397579142 +0000 UTC m=+2142.938804743" lastFinishedPulling="2026-01-26 11:18:49.889032341 +0000 UTC m=+2145.430257942" observedRunningTime="2026-01-26 11:18:50.438365063 +0000 UTC m=+2145.979590634" watchObservedRunningTime="2026-01-26 11:18:50.43864248 +0000 UTC m=+2145.979868051"
Jan 26 11:18:56 crc kubenswrapper[5003]: I0126 11:18:56.610258 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-lmhqz"
Jan 26 11:18:56 crc kubenswrapper[5003]: I0126 11:18:56.610481 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-lmhqz"
Jan 26 11:18:56 crc kubenswrapper[5003]: I0126 11:18:56.671940 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-lmhqz"
Jan 26 11:18:57 crc kubenswrapper[5003]: I0126 11:18:57.534377 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-lmhqz"
status="ready" pod="openshift-marketplace/certified-operators-lmhqz" Jan 26 11:18:57 crc kubenswrapper[5003]: I0126 11:18:57.585258 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lmhqz"] Jan 26 11:18:59 crc kubenswrapper[5003]: I0126 11:18:59.332626 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-q9kp5"] Jan 26 11:18:59 crc kubenswrapper[5003]: I0126 11:18:59.333982 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-q9kp5" Jan 26 11:18:59 crc kubenswrapper[5003]: I0126 11:18:59.347067 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-q9kp5"] Jan 26 11:18:59 crc kubenswrapper[5003]: I0126 11:18:59.477260 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-lmhqz" podUID="88fccf68-f789-4f4d-86fe-a7f97139b34d" containerName="registry-server" containerID="cri-o://ed3b2c893281163d416684928254025fa3324e36b4c35d400e37812f72860953" gracePeriod=2 Jan 26 11:18:59 crc kubenswrapper[5003]: I0126 11:18:59.490835 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6325c650-5c90-4c77-a62f-56550fe0a987-utilities\") pod \"redhat-operators-q9kp5\" (UID: \"6325c650-5c90-4c77-a62f-56550fe0a987\") " pod="openshift-marketplace/redhat-operators-q9kp5" Jan 26 11:18:59 crc kubenswrapper[5003]: I0126 11:18:59.490947 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6325c650-5c90-4c77-a62f-56550fe0a987-catalog-content\") pod \"redhat-operators-q9kp5\" (UID: \"6325c650-5c90-4c77-a62f-56550fe0a987\") " pod="openshift-marketplace/redhat-operators-q9kp5" Jan 26 11:18:59 crc kubenswrapper[5003]: I0126 11:18:59.490987 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6stt4\" (UniqueName: \"kubernetes.io/projected/6325c650-5c90-4c77-a62f-56550fe0a987-kube-api-access-6stt4\") pod \"redhat-operators-q9kp5\" (UID: \"6325c650-5c90-4c77-a62f-56550fe0a987\") " pod="openshift-marketplace/redhat-operators-q9kp5" Jan 26 11:18:59 crc kubenswrapper[5003]: I0126 11:18:59.592389 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6325c650-5c90-4c77-a62f-56550fe0a987-utilities\") pod \"redhat-operators-q9kp5\" (UID: \"6325c650-5c90-4c77-a62f-56550fe0a987\") " pod="openshift-marketplace/redhat-operators-q9kp5" Jan 26 11:18:59 crc kubenswrapper[5003]: I0126 11:18:59.592831 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6325c650-5c90-4c77-a62f-56550fe0a987-catalog-content\") pod \"redhat-operators-q9kp5\" (UID: \"6325c650-5c90-4c77-a62f-56550fe0a987\") " pod="openshift-marketplace/redhat-operators-q9kp5" Jan 26 11:18:59 crc kubenswrapper[5003]: I0126 11:18:59.592862 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6stt4\" (UniqueName: \"kubernetes.io/projected/6325c650-5c90-4c77-a62f-56550fe0a987-kube-api-access-6stt4\") pod \"redhat-operators-q9kp5\" (UID: \"6325c650-5c90-4c77-a62f-56550fe0a987\") " pod="openshift-marketplace/redhat-operators-q9kp5" Jan 26 
11:18:59 crc kubenswrapper[5003]: I0126 11:18:59.593313 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6325c650-5c90-4c77-a62f-56550fe0a987-catalog-content\") pod \"redhat-operators-q9kp5\" (UID: \"6325c650-5c90-4c77-a62f-56550fe0a987\") " pod="openshift-marketplace/redhat-operators-q9kp5" Jan 26 11:18:59 crc kubenswrapper[5003]: I0126 11:18:59.593565 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6325c650-5c90-4c77-a62f-56550fe0a987-utilities\") pod \"redhat-operators-q9kp5\" (UID: \"6325c650-5c90-4c77-a62f-56550fe0a987\") " pod="openshift-marketplace/redhat-operators-q9kp5" Jan 26 11:18:59 crc kubenswrapper[5003]: I0126 11:18:59.613817 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6stt4\" (UniqueName: \"kubernetes.io/projected/6325c650-5c90-4c77-a62f-56550fe0a987-kube-api-access-6stt4\") pod \"redhat-operators-q9kp5\" (UID: \"6325c650-5c90-4c77-a62f-56550fe0a987\") " pod="openshift-marketplace/redhat-operators-q9kp5" Jan 26 11:18:59 crc kubenswrapper[5003]: I0126 11:18:59.667137 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-q9kp5" Jan 26 11:18:59 crc kubenswrapper[5003]: I0126 11:18:59.933027 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-q9kp5"] Jan 26 11:19:00 crc kubenswrapper[5003]: I0126 11:19:00.317863 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lmhqz" Jan 26 11:19:00 crc kubenswrapper[5003]: I0126 11:19:00.416199 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88fccf68-f789-4f4d-86fe-a7f97139b34d-catalog-content\") pod \"88fccf68-f789-4f4d-86fe-a7f97139b34d\" (UID: \"88fccf68-f789-4f4d-86fe-a7f97139b34d\") " Jan 26 11:19:00 crc kubenswrapper[5003]: I0126 11:19:00.416251 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88fccf68-f789-4f4d-86fe-a7f97139b34d-utilities\") pod \"88fccf68-f789-4f4d-86fe-a7f97139b34d\" (UID: \"88fccf68-f789-4f4d-86fe-a7f97139b34d\") " Jan 26 11:19:00 crc kubenswrapper[5003]: I0126 11:19:00.416352 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7lrj5\" (UniqueName: \"kubernetes.io/projected/88fccf68-f789-4f4d-86fe-a7f97139b34d-kube-api-access-7lrj5\") pod \"88fccf68-f789-4f4d-86fe-a7f97139b34d\" (UID: \"88fccf68-f789-4f4d-86fe-a7f97139b34d\") " Jan 26 11:19:00 crc kubenswrapper[5003]: I0126 11:19:00.418449 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88fccf68-f789-4f4d-86fe-a7f97139b34d-utilities" (OuterVolumeSpecName: "utilities") pod "88fccf68-f789-4f4d-86fe-a7f97139b34d" (UID: "88fccf68-f789-4f4d-86fe-a7f97139b34d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:19:00 crc kubenswrapper[5003]: I0126 11:19:00.424982 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88fccf68-f789-4f4d-86fe-a7f97139b34d-kube-api-access-7lrj5" (OuterVolumeSpecName: "kube-api-access-7lrj5") pod "88fccf68-f789-4f4d-86fe-a7f97139b34d" (UID: "88fccf68-f789-4f4d-86fe-a7f97139b34d"). InnerVolumeSpecName "kube-api-access-7lrj5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 11:19:00 crc kubenswrapper[5003]: I0126 11:19:00.484871 5003 generic.go:334] "Generic (PLEG): container finished" podID="88fccf68-f789-4f4d-86fe-a7f97139b34d" containerID="ed3b2c893281163d416684928254025fa3324e36b4c35d400e37812f72860953" exitCode=0 Jan 26 11:19:00 crc kubenswrapper[5003]: I0126 11:19:00.484927 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lmhqz" event={"ID":"88fccf68-f789-4f4d-86fe-a7f97139b34d","Type":"ContainerDied","Data":"ed3b2c893281163d416684928254025fa3324e36b4c35d400e37812f72860953"} Jan 26 11:19:00 crc kubenswrapper[5003]: I0126 11:19:00.484966 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lmhqz" Jan 26 11:19:00 crc kubenswrapper[5003]: I0126 11:19:00.484989 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lmhqz" event={"ID":"88fccf68-f789-4f4d-86fe-a7f97139b34d","Type":"ContainerDied","Data":"b65d95b56088b3c7647cc3222b4a639cc9848d9d92e6bf6f1711079fa3d66db3"} Jan 26 11:19:00 crc kubenswrapper[5003]: I0126 11:19:00.485013 5003 scope.go:117] "RemoveContainer" containerID="ed3b2c893281163d416684928254025fa3324e36b4c35d400e37812f72860953" Jan 26 11:19:00 crc kubenswrapper[5003]: I0126 11:19:00.486100 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88fccf68-f789-4f4d-86fe-a7f97139b34d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "88fccf68-f789-4f4d-86fe-a7f97139b34d" (UID: "88fccf68-f789-4f4d-86fe-a7f97139b34d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 11:19:00 crc kubenswrapper[5003]: I0126 11:19:00.486846 5003 generic.go:334] "Generic (PLEG): container finished" podID="6325c650-5c90-4c77-a62f-56550fe0a987" containerID="469d0a739f1147bab83c29bba51d7c5cafcd071e0212cd5ff2239a31ecdaf65d" exitCode=0 Jan 26 11:19:00 crc kubenswrapper[5003]: I0126 11:19:00.486881 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q9kp5" event={"ID":"6325c650-5c90-4c77-a62f-56550fe0a987","Type":"ContainerDied","Data":"469d0a739f1147bab83c29bba51d7c5cafcd071e0212cd5ff2239a31ecdaf65d"} Jan 26 11:19:00 crc kubenswrapper[5003]: I0126 11:19:00.486908 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q9kp5" event={"ID":"6325c650-5c90-4c77-a62f-56550fe0a987","Type":"ContainerStarted","Data":"31b70396bd189f28a6a8f1cfd3136894b9d9d13dee34e9f665ccaa9999e6a10a"} Jan 26 11:19:00 crc kubenswrapper[5003]: I0126 11:19:00.505794 5003 scope.go:117] "RemoveContainer" containerID="9c3b6c33f87f85f2180733bb307802315444afe657539a8b31b3761595d5ad4e" Jan 26 11:19:00 crc kubenswrapper[5003]: I0126 11:19:00.518055 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88fccf68-f789-4f4d-86fe-a7f97139b34d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 11:19:00 crc kubenswrapper[5003]: I0126 11:19:00.518098 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88fccf68-f789-4f4d-86fe-a7f97139b34d-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 11:19:00 crc kubenswrapper[5003]: I0126 11:19:00.518117 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7lrj5\" (UniqueName: \"kubernetes.io/projected/88fccf68-f789-4f4d-86fe-a7f97139b34d-kube-api-access-7lrj5\") on node \"crc\" DevicePath \"\"" Jan 26 11:19:00 crc kubenswrapper[5003]: I0126 11:19:00.526980 5003 scope.go:117] "RemoveContainer" containerID="2dc2265cac42e7d76b14c9b4dbeabf7e9ad26d506c55c78064b5b7e79bb64f0f" Jan 26 11:19:00 crc kubenswrapper[5003]: I0126 11:19:00.541712 5003 scope.go:117] "RemoveContainer" containerID="ed3b2c893281163d416684928254025fa3324e36b4c35d400e37812f72860953" Jan 26 11:19:00 crc kubenswrapper[5003]: E0126 11:19:00.542164 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed3b2c893281163d416684928254025fa3324e36b4c35d400e37812f72860953\": container with ID starting with ed3b2c893281163d416684928254025fa3324e36b4c35d400e37812f72860953 not found: ID does not exist" containerID="ed3b2c893281163d416684928254025fa3324e36b4c35d400e37812f72860953" Jan 26 11:19:00 crc kubenswrapper[5003]: I0126 11:19:00.542203 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed3b2c893281163d416684928254025fa3324e36b4c35d400e37812f72860953"} err="failed to get container status \"ed3b2c893281163d416684928254025fa3324e36b4c35d400e37812f72860953\": rpc error: code = NotFound desc = could not find container \"ed3b2c893281163d416684928254025fa3324e36b4c35d400e37812f72860953\": container with ID starting with ed3b2c893281163d416684928254025fa3324e36b4c35d400e37812f72860953 not found: ID does not exist" Jan 26 11:19:00 crc kubenswrapper[5003]: I0126 11:19:00.542225 5003 scope.go:117] "RemoveContainer" containerID="9c3b6c33f87f85f2180733bb307802315444afe657539a8b31b3761595d5ad4e" Jan 26 11:19:00 crc 
kubenswrapper[5003]: E0126 11:19:00.542510 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c3b6c33f87f85f2180733bb307802315444afe657539a8b31b3761595d5ad4e\": container with ID starting with 9c3b6c33f87f85f2180733bb307802315444afe657539a8b31b3761595d5ad4e not found: ID does not exist" containerID="9c3b6c33f87f85f2180733bb307802315444afe657539a8b31b3761595d5ad4e" Jan 26 11:19:00 crc kubenswrapper[5003]: I0126 11:19:00.542538 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c3b6c33f87f85f2180733bb307802315444afe657539a8b31b3761595d5ad4e"} err="failed to get container status \"9c3b6c33f87f85f2180733bb307802315444afe657539a8b31b3761595d5ad4e\": rpc error: code = NotFound desc = could not find container \"9c3b6c33f87f85f2180733bb307802315444afe657539a8b31b3761595d5ad4e\": container with ID starting with 9c3b6c33f87f85f2180733bb307802315444afe657539a8b31b3761595d5ad4e not found: ID does not exist" Jan 26 11:19:00 crc kubenswrapper[5003]: I0126 11:19:00.542558 5003 scope.go:117] "RemoveContainer" containerID="2dc2265cac42e7d76b14c9b4dbeabf7e9ad26d506c55c78064b5b7e79bb64f0f" Jan 26 11:19:00 crc kubenswrapper[5003]: E0126 11:19:00.543024 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2dc2265cac42e7d76b14c9b4dbeabf7e9ad26d506c55c78064b5b7e79bb64f0f\": container with ID starting with 2dc2265cac42e7d76b14c9b4dbeabf7e9ad26d506c55c78064b5b7e79bb64f0f not found: ID does not exist" containerID="2dc2265cac42e7d76b14c9b4dbeabf7e9ad26d506c55c78064b5b7e79bb64f0f" Jan 26 11:19:00 crc kubenswrapper[5003]: I0126 11:19:00.543053 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2dc2265cac42e7d76b14c9b4dbeabf7e9ad26d506c55c78064b5b7e79bb64f0f"} err="failed to get container status \"2dc2265cac42e7d76b14c9b4dbeabf7e9ad26d506c55c78064b5b7e79bb64f0f\": rpc error: code = NotFound desc = could not find container \"2dc2265cac42e7d76b14c9b4dbeabf7e9ad26d506c55c78064b5b7e79bb64f0f\": container with ID starting with 2dc2265cac42e7d76b14c9b4dbeabf7e9ad26d506c55c78064b5b7e79bb64f0f not found: ID does not exist" Jan 26 11:19:00 crc kubenswrapper[5003]: I0126 11:19:00.810188 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lmhqz"] Jan 26 11:19:00 crc kubenswrapper[5003]: I0126 11:19:00.817076 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-lmhqz"] Jan 26 11:19:01 crc kubenswrapper[5003]: I0126 11:19:01.008368 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88fccf68-f789-4f4d-86fe-a7f97139b34d" path="/var/lib/kubelet/pods/88fccf68-f789-4f4d-86fe-a7f97139b34d/volumes" Jan 26 11:19:01 crc kubenswrapper[5003]: I0126 11:19:01.499971 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q9kp5" event={"ID":"6325c650-5c90-4c77-a62f-56550fe0a987","Type":"ContainerStarted","Data":"4bcddbe6670e67e2ec20680fb0f759a6374b148f4c003ce15a7225c66bb1f3d8"} Jan 26 11:19:02 crc kubenswrapper[5003]: I0126 11:19:02.510097 5003 generic.go:334] "Generic (PLEG): container finished" podID="6325c650-5c90-4c77-a62f-56550fe0a987" containerID="4bcddbe6670e67e2ec20680fb0f759a6374b148f4c003ce15a7225c66bb1f3d8" exitCode=0 Jan 26 11:19:02 crc kubenswrapper[5003]: I0126 11:19:02.510146 5003 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-marketplace/redhat-operators-q9kp5" event={"ID":"6325c650-5c90-4c77-a62f-56550fe0a987","Type":"ContainerDied","Data":"4bcddbe6670e67e2ec20680fb0f759a6374b148f4c003ce15a7225c66bb1f3d8"} Jan 26 11:19:03 crc kubenswrapper[5003]: I0126 11:19:03.521500 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q9kp5" event={"ID":"6325c650-5c90-4c77-a62f-56550fe0a987","Type":"ContainerStarted","Data":"2d4a713dc980aab57eafb3eb343cd36e4bd9d2c6a8d46db6df6a8f001bb52a62"} Jan 26 11:19:03 crc kubenswrapper[5003]: I0126 11:19:03.550817 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-q9kp5" podStartSLOduration=2.0907833829999998 podStartE2EDuration="4.550794344s" podCreationTimestamp="2026-01-26 11:18:59 +0000 UTC" firstStartedPulling="2026-01-26 11:19:00.489154475 +0000 UTC m=+2156.030380046" lastFinishedPulling="2026-01-26 11:19:02.949165406 +0000 UTC m=+2158.490391007" observedRunningTime="2026-01-26 11:19:03.543782956 +0000 UTC m=+2159.085008547" watchObservedRunningTime="2026-01-26 11:19:03.550794344 +0000 UTC m=+2159.092019945" Jan 26 11:19:09 crc kubenswrapper[5003]: I0126 11:19:09.040665 5003 patch_prober.go:28] interesting pod/machine-config-daemon-m84kp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 11:19:09 crc kubenswrapper[5003]: I0126 11:19:09.041087 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m84kp" podUID="c9e56ffa-1020-4f9f-b2f4-cd11ed3850bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 11:19:10 crc kubenswrapper[5003]: I0126 11:19:09.668002 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-q9kp5" Jan 26 11:19:10 crc kubenswrapper[5003]: I0126 11:19:09.668404 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-q9kp5"